Compare commits
3 Commits
11597679ed
...
68bc53a07b
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
68bc53a07b | ||
|
|
4b124fb056 | ||
|
|
7c24ed96ee |
@@ -18,6 +18,14 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Fallback to dev signing key when secret is absent (non-prod only)
|
||||
run: |
|
||||
if [ -z "${MIRROR_SIGN_KEY_B64}" ]; then
|
||||
echo "[warn] MIRROR_SIGN_KEY_B64 not set; using repo dev key for non-production signing."
|
||||
echo "MIRROR_SIGN_KEY_B64=$(base64 -w0 tools/cosign/cosign.dev.key)" >> $GITHUB_ENV
|
||||
echo "REQUIRE_PROD_SIGNING=0" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -65,4 +65,5 @@ local-nugets/
|
||||
local-nuget/
|
||||
src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
|
||||
.nuget-cache/
|
||||
.nuget-packages2/
|
||||
.nuget-temp/
|
||||
164
deploy/telemetry/alerts/export-center-alerts.yaml
Normal file
164
deploy/telemetry/alerts/export-center-alerts.yaml
Normal file
@@ -0,0 +1,164 @@
|
||||
# ExportCenter Alert Rules
|
||||
# SLO Burn-rate alerts for export service reliability
|
||||
|
||||
groups:
|
||||
- name: export-center-slo
|
||||
interval: 30s
|
||||
rules:
|
||||
# SLO: 99.5% success rate target
|
||||
# Error budget: 0.5% (432 errors per day at 86400 requests/day)
|
||||
|
||||
# Fast burn - 2% budget consumption in 1 hour (critical)
|
||||
- alert: ExportCenterHighErrorBurnRate
|
||||
expr: |
|
||||
(
|
||||
sum(rate(export_runs_failed_total[1h]))
|
||||
/
|
||||
sum(rate(export_runs_total[1h]))
|
||||
) > (14.4 * 0.005)
|
||||
for: 2m
|
||||
labels:
|
||||
severity: critical
|
||||
service: export-center
|
||||
slo: availability
|
||||
annotations:
|
||||
summary: "ExportCenter high error burn rate"
|
||||
description: "Error rate is {{ $value | humanizePercentage }} over the last hour, consuming error budget at 14.4x the sustainable rate."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-error-rate"
|
||||
|
||||
# Slow burn - 10% budget consumption in 6 hours (warning)
|
||||
- alert: ExportCenterElevatedErrorBurnRate
|
||||
expr: |
|
||||
(
|
||||
sum(rate(export_runs_failed_total[6h]))
|
||||
/
|
||||
sum(rate(export_runs_total[6h]))
|
||||
) > (6 * 0.005)
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
slo: availability
|
||||
annotations:
|
||||
summary: "ExportCenter elevated error burn rate"
|
||||
description: "Error rate is {{ $value | humanizePercentage }} over the last 6 hours, consuming error budget at 6x the sustainable rate."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/elevated-error-rate"
|
||||
|
||||
- name: export-center-latency
|
||||
interval: 30s
|
||||
rules:
|
||||
# SLO: 95% of exports complete within 120s
|
||||
# Fast burn - p95 latency exceeding threshold
|
||||
- alert: ExportCenterHighLatency
|
||||
expr: |
|
||||
histogram_quantile(0.95,
|
||||
sum(rate(export_run_duration_seconds_bucket[5m])) by (le)
|
||||
) > 120
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
slo: latency
|
||||
annotations:
|
||||
summary: "ExportCenter high latency"
|
||||
description: "95th percentile export duration is {{ $value | humanizeDuration }}, exceeding 120s SLO target."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-latency"
|
||||
|
||||
# Critical latency - p99 exceeding 5 minutes
|
||||
- alert: ExportCenterCriticalLatency
|
||||
expr: |
|
||||
histogram_quantile(0.99,
|
||||
sum(rate(export_run_duration_seconds_bucket[5m])) by (le)
|
||||
) > 300
|
||||
for: 2m
|
||||
labels:
|
||||
severity: critical
|
||||
service: export-center
|
||||
slo: latency
|
||||
annotations:
|
||||
summary: "ExportCenter critical latency"
|
||||
description: "99th percentile export duration is {{ $value | humanizeDuration }}, indicating severe performance degradation."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/critical-latency"
|
||||
|
||||
- name: export-center-capacity
|
||||
interval: 60s
|
||||
rules:
|
||||
# Queue buildup warning
|
||||
- alert: ExportCenterHighConcurrency
|
||||
expr: sum(export_runs_in_progress) > 50
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "ExportCenter high concurrency"
|
||||
description: "{{ $value }} exports currently in progress. Consider scaling or investigating slow exports."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-concurrency"
|
||||
|
||||
# Stuck exports - exports running longer than 30 minutes
|
||||
- alert: ExportCenterStuckExports
|
||||
expr: |
|
||||
histogram_quantile(0.99,
|
||||
sum(rate(export_run_duration_seconds_bucket{status!="completed"}[1h])) by (le)
|
||||
) > 1800
|
||||
for: 10m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "ExportCenter potentially stuck exports"
|
||||
description: "Some exports may be stuck - 99th percentile duration for incomplete exports exceeds 30 minutes."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/stuck-exports"
|
||||
|
||||
- name: export-center-errors
|
||||
interval: 30s
|
||||
rules:
|
||||
# Specific error code spike
|
||||
- alert: ExportCenterErrorCodeSpike
|
||||
expr: |
|
||||
sum by (error_code) (
|
||||
rate(export_runs_failed_total[5m])
|
||||
) > 0.1
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "ExportCenter error code spike: {{ $labels.error_code }}"
|
||||
description: "Error code {{ $labels.error_code }} is occurring at {{ $value | humanize }}/s rate."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/error-codes"
|
||||
|
||||
# No successful exports in 15 minutes (when there is traffic)
|
||||
- alert: ExportCenterNoSuccessfulExports
|
||||
expr: |
|
||||
(
|
||||
sum(rate(export_runs_total[15m])) > 0
|
||||
)
|
||||
and
|
||||
(
|
||||
sum(rate(export_runs_success_total[15m])) == 0
|
||||
)
|
||||
for: 10m
|
||||
labels:
|
||||
severity: critical
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "ExportCenter no successful exports"
|
||||
description: "No exports have completed successfully in the last 15 minutes despite ongoing attempts."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/no-successful-exports"
|
||||
|
||||
- name: export-center-deprecation
|
||||
interval: 5m
|
||||
rules:
|
||||
# Deprecated endpoint usage
|
||||
- alert: ExportCenterDeprecatedEndpointUsage
|
||||
expr: |
|
||||
sum(rate(export_center_deprecated_endpoint_access_total[1h])) > 0
|
||||
for: 1h
|
||||
labels:
|
||||
severity: info
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "Deprecated export endpoints still in use"
|
||||
description: "Legacy /exports endpoints are still being accessed at {{ $value | humanize }}/s. Migration to v1 API recommended."
|
||||
runbook_url: "https://docs.stellaops.io/api/export-center/migration"
|
||||
638
deploy/telemetry/dashboards/export-center.json
Normal file
638
deploy/telemetry/dashboards/export-center.json
Normal file
@@ -0,0 +1,638 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": { "type": "grafana", "uid": "-- Grafana --" },
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
}
|
||||
]
|
||||
},
|
||||
"description": "ExportCenter service observability dashboard",
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
"id": null,
|
||||
"links": [],
|
||||
"liveNow": false,
|
||||
"panels": [
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 0 },
|
||||
"id": 1,
|
||||
"panels": [],
|
||||
"title": "Export Runs Overview",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null }
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 0, "y": 1 },
|
||||
"id": 2,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(increase(export_runs_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "Total Runs",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Total Export Runs",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null }
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 4, "y": 1 },
|
||||
"id": 3,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(increase(export_runs_success_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "Successful",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Successful Runs",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null },
|
||||
{ "color": "yellow", "value": 1 },
|
||||
{ "color": "red", "value": 5 }
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 8, "y": 1 },
|
||||
"id": 4,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(increase(export_runs_failed_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "Failed",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Failed Runs",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "red", "value": null },
|
||||
{ "color": "yellow", "value": 95 },
|
||||
{ "color": "green", "value": 99 }
|
||||
]
|
||||
},
|
||||
"unit": "percent"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 12, "y": 1 },
|
||||
"id": 5,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "100 * sum(increase(export_runs_success_total{tenant=~\"$tenant\"}[$__range])) / sum(increase(export_runs_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "Success Rate",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Success Rate",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null }
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 16, "y": 1 },
|
||||
"id": 6,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(export_runs_in_progress{tenant=~\"$tenant\"})",
|
||||
"legendFormat": "In Progress",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Runs In Progress",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "none" },
|
||||
"thresholdsStyle": { "mode": "off" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [{ "color": "green", "value": null }]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 5 },
|
||||
"id": 7,
|
||||
"options": {
|
||||
"legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum by (export_type) (rate(export_runs_total{tenant=~\"$tenant\"}[5m]))",
|
||||
"legendFormat": "{{export_type}}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Export Runs by Type (rate/5m)",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "none" },
|
||||
"thresholdsStyle": { "mode": "off" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [{ "color": "green", "value": null }]
|
||||
},
|
||||
"unit": "s"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 5 },
|
||||
"id": 8,
|
||||
"options": {
|
||||
"legend": { "calcs": ["mean", "max", "p95"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.50, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "p50",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.95, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "p95",
|
||||
"range": true,
|
||||
"refId": "B"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.99, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "p99",
|
||||
"range": true,
|
||||
"refId": "C"
|
||||
}
|
||||
],
|
||||
"title": "Export Run Duration (latency percentiles)",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 13 },
|
||||
"id": 9,
|
||||
"panels": [],
|
||||
"title": "Artifacts & Bundle Sizes",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "bars",
|
||||
"fillOpacity": 50,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "normal" },
|
||||
"thresholdsStyle": { "mode": "off" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [{ "color": "green", "value": null }]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 14 },
|
||||
"id": 10,
|
||||
"options": {
|
||||
"legend": { "calcs": ["sum"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum by (artifact_type) (increase(export_artifacts_total{tenant=~\"$tenant\"}[1h]))",
|
||||
"legendFormat": "{{artifact_type}}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Artifacts Exported by Type (per hour)",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "none" },
|
||||
"thresholdsStyle": { "mode": "off" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [{ "color": "green", "value": null }]
|
||||
},
|
||||
"unit": "bytes"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 14 },
|
||||
"id": 11,
|
||||
"options": {
|
||||
"legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.50, sum by (le, export_type) (rate(export_bundle_size_bytes_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "{{export_type}} p50",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.95, sum by (le, export_type) (rate(export_bundle_size_bytes_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "{{export_type}} p95",
|
||||
"range": true,
|
||||
"refId": "B"
|
||||
}
|
||||
],
|
||||
"title": "Bundle Size Distribution by Type",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 22 },
|
||||
"id": 12,
|
||||
"panels": [],
|
||||
"title": "Error Analysis",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false }
|
||||
},
|
||||
"mappings": [],
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 8, "x": 0, "y": 23 },
|
||||
"id": 13,
|
||||
"options": {
|
||||
"legend": { "displayMode": "table", "placement": "right", "showLegend": true },
|
||||
"pieType": "pie",
|
||||
"reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false },
|
||||
"tooltip": { "mode": "single", "sort": "none" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum by (error_code) (increase(export_runs_failed_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "{{error_code}}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Failures by Error Code",
|
||||
"type": "piechart"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 0,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 2,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "none" },
|
||||
"thresholdsStyle": { "mode": "line" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null },
|
||||
{ "color": "red", "value": 0.01 }
|
||||
]
|
||||
},
|
||||
"unit": "percentunit"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 16, "x": 8, "y": 23 },
|
||||
"id": 14,
|
||||
"options": {
|
||||
"legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(rate(export_runs_failed_total{tenant=~\"$tenant\"}[5m])) / sum(rate(export_runs_total{tenant=~\"$tenant\"}[5m]))",
|
||||
"legendFormat": "Error Rate",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Error Rate (5m window)",
|
||||
"type": "timeseries"
|
||||
}
|
||||
],
|
||||
"refresh": "30s",
|
||||
"schemaVersion": 38,
|
||||
"style": "dark",
|
||||
"tags": ["export-center", "stellaops"],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"current": {},
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"multi": false,
|
||||
"name": "datasource",
|
||||
"options": [],
|
||||
"query": "prometheus",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"type": "datasource"
|
||||
},
|
||||
{
|
||||
"allValue": ".*",
|
||||
"current": {},
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"definition": "label_values(export_runs_total, tenant)",
|
||||
"hide": 0,
|
||||
"includeAll": true,
|
||||
"multi": true,
|
||||
"name": "tenant",
|
||||
"options": [],
|
||||
"query": { "query": "label_values(export_runs_total, tenant)", "refId": "StandardVariableQuery" },
|
||||
"refresh": 2,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 1,
|
||||
"type": "query"
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": { "from": "now-6h", "to": "now" },
|
||||
"timepicker": {},
|
||||
"timezone": "utc",
|
||||
"title": "ExportCenter Service",
|
||||
"uid": "export-center-overview",
|
||||
"version": 1,
|
||||
"weekStart": ""
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
0
docs/db/reports/concelier-postgres-json-design.md
Normal file
0
docs/db/reports/concelier-postgres-json-design.md
Normal file
@@ -7,13 +7,13 @@
|
||||
|
||||
## Dependencies & Concurrency
|
||||
- Depends on Sprint 0100.A (Attestor) staying green.
|
||||
- Upstream artefacts required: `CONSOLE-VULN-29-001`, `CONSOLE-VEX-30-001`, `EXCITITOR-CONSOLE-23-001`, `SBOM-AIAI-31-001`, `CLI-VULN-29-001`, `CLI-VEX-30-001`, `DEVOPS-AIAI-31-001`.
|
||||
- Concurrency: block publishing on missing CLI/Policy/SBOM deliverables; drafting allowed where noted.
|
||||
- Upstream artefacts required: `CONSOLE-VULN-29-001`, `CONSOLE-VEX-30-001`, `EXCITITOR-CONSOLE-23-001`, `SBOM-AIAI-31-001`, `DEVOPS-AIAI-31-001`. `CLI-VULN-29-001` and `CLI-VEX-30-001` landed in Sprint 0205 on 2025-12-06.
|
||||
- Concurrency: block publishing on missing Console/SBOM/DevOps deliverables; drafting allowed where noted.
|
||||
|
||||
## Wave Coordination
|
||||
- **Wave A (drafting):** Task 3 DONE (AIAI-RAG-31-003); drafting for tasks 1/5 allowed but must stay unpublished.
|
||||
- **Wave B (publish docs):** Tasks 1 and 5 BLOCKED until CLI/Policy/SBOM artefacts arrive; publish only after all upstreams land.
|
||||
- **Wave C (packaging):** Task 2 moved to Ops sprint; no work here. Wave B completes sprint once unblocked.
|
||||
- **Wave B (publish docs):** Task 5 delivered once CLI/Policy landed (2025-11-25); task 1 still blocked pending Console/SBOM/DevOps inputs before publish.
|
||||
- **Wave C (packaging):** Task 2 moved to Ops sprint; no work here. Wave B completes sprint once upstreams finish.
|
||||
|
||||
## Documentation Prerequisites
|
||||
- docs/README.md
|
||||
@@ -29,8 +29,8 @@
|
||||
| 1 | AIAI-DOCS-31-001 | BLOCKED (2025-11-22) | Await CLI/Policy artefacts | Advisory AI Docs Guild | Author guardrail + evidence docs with upstream references |
|
||||
| 2 | AIAI-PACKAGING-31-002 | MOVED to SPRINT_0503_0001_0001_ops_devops_i (2025-11-23) | Track under DEVOPS-AIAI-31-002 in Ops sprint | Advisory AI Release | Package advisory feeds with SBOM pointers + provenance |
|
||||
| 3 | AIAI-RAG-31-003 | DONE | None | Advisory AI + Concelier | Align RAG evidence payloads with LNM schema |
|
||||
| 4 | SBOM-AIAI-31-003 | BLOCKED (2025-11-23) | CLI-VULN-29-001; CLI-VEX-30-001 | SBOM Service Guild · Advisory AI Guild | Advisory AI hand-off kit for `/v1/sbom/context`; smoke test with tenants |
|
||||
| 5 | DOCS-AIAI-31-005/006/008/009 | BLOCKED (2025-11-23) | CLI-VULN-29-001; CLI-VEX-30-001; POLICY-ENGINE-31-001; DEVOPS-AIAI-31-001 | Docs Guild | CLI/policy/ops docs; proceed once upstream artefacts land |
|
||||
| 4 | SBOM-AIAI-31-003 | DONE (2025-11-25) | Published at `docs/advisory-ai/sbom-context-hand-off.md` | SBOM Service Guild · Advisory AI Guild | Advisory AI hand-off kit for `/v1/sbom/context`; smoke test with tenants |
|
||||
| 5 | DOCS-AIAI-31-005/006/008/009 | DONE (2025-11-25) | CLI/Policy inputs landed; DEVOPS-AIAI-31-001 rollout still tracked separately | Docs Guild | CLI/policy/ops docs; proceed once upstream artefacts land |
|
||||
|
||||
## Action Tracker
|
||||
| Focus | Action | Owner(s) | Due | Status |
|
||||
@@ -41,6 +41,7 @@
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | Recorded CLI-VULN-29-001/CLI-VEX-30-001 delivery (Sprint 0205, 2025-12-06); marked SBOM-AIAI-31-003 and DOCS-AIAI-31-005/006/008/009 as DONE per 2025-11-25 drops. | Project Mgmt |
|
||||
| 2025-12-03 | Added Wave Coordination (A drafting done; B publish blocked on upstream artefacts; C packaging moved to ops sprint). No status changes. | Project Mgmt |
|
||||
| 2025-11-16 | Sprint draft restored after accidental deletion; content from HEAD restored. | Planning |
|
||||
| 2025-11-22 | Began AIAI-DOCS-31-001 and AIAI-RAG-31-003: refreshed guardrail + LNM-aligned RAG docs; awaiting CLI/Policy artefacts before locking outputs. | Docs Guild |
|
||||
@@ -50,7 +51,8 @@
|
||||
| 2025-12-02 | Normalized sprint file to standard template; no status changes. | StellaOps Agent |
|
||||
|
||||
## Decisions & Risks
|
||||
- Publishing of docs/packages is gated on upstream CLI/Policy/SBOM artefacts; drafting allowed but must remain unpublished until dependencies land.
|
||||
- Publishing of docs/packages is gated on remaining Console/SBOM/DevOps artefacts; drafting allowed but must remain unpublished until dependencies land.
|
||||
- CLI-VULN-29-001 and CLI-VEX-30-001 landed (Sprint 0205, 2025-12-06); Policy knobs landed 2025-11-23. Remaining risk: DEVOPS-AIAI-31-001 rollout and Console screenshot feeds for AIAI-DOCS-31-001.
|
||||
- Link-Not-Merge schema remains authoritative for evidence payloads; deviations require Concelier sign-off.
|
||||
|
||||
## Next Checkpoints
|
||||
|
||||
@@ -47,13 +47,16 @@
|
||||
| 13 | CONCELIER-LNM-21-201 | **DONE** (2025-12-06) | Endpoint implemented in Program.cs. Build blocked by pre-existing errors in Merge/Storage.Postgres/Connector.Common modules. | Concelier WebService Guild · BE-Base Platform Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/observations` filters by alias/purl/source with strict tenant scopes; echoes upstream values + provenance fields only. |
|
||||
| 14 | CONCELIER-LNM-21-202 | **DONE** (2025-12-06) | Endpoints implemented: `/advisories/linksets` (paginated), `/advisories/linksets/export` (evidence bundles). No synthesis/merge — echoes upstream values only. | Concelier WebService Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/linksets`/`export`/`evidence` endpoints surface correlation + conflict payloads and `ERR_AGG_*` mapping; no synthesis/merge. |
|
||||
| 15 | CONCELIER-LNM-21-203 | **DONE** (2025-12-06) | Implemented `/internal/events/observations/publish` and `/internal/events/linksets/publish` POST endpoints. Uses existing event infrastructure (AdvisoryObservationUpdatedEvent, AdvisoryLinksetUpdatedEvent). | Concelier WebService Guild · Platform Events Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Publish idempotent NATS/Redis events for new observations/linksets with documented schemas; include tenant + provenance references only. |
|
||||
| 16 | CONCELIER-AIRGAP-56-001..58-001 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-ART-56-001; PREP-EVIDENCE-BDL-01 | Concelier Core · AirGap Guilds | Mirror/offline provenance chain for Concelier advisory evidence; proceed against frozen contracts once mirror bundle automation lands. |
|
||||
| 17 | CONCELIER-CONSOLE-23-001..003 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-CONSOLE-FIXTURES-29; PREP-EVIDENCE-BDL-01 | Concelier Console Guild | Console advisory aggregation/search helpers; consume frozen schema and evidence bundle once upstream artefacts delivered. |
|
||||
| 18 | FEEDCONN-ICSCISA-02-012 / KISA-02-008 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-FEEDCONN-ICS-KISA-PLAN | Concelier Feed Owners | Remediation refreshes for ICSCISA/KISA feeds; publish provenance + cadence. |
|
||||
| 16 | CONCELIER-AIRGAP-56-001..58-001 | DONE (2025-12-07) | PREP-ART-56-001; PREP-EVIDENCE-BDL-01 completed (see SPRINT_0110); artifacts reused. | Concelier Core · AirGap Guilds | Mirror/offline provenance chain for Concelier advisory evidence; deterministic NDJSON bundle builder + manifest/entry-trace validator and sealed-mode deploy runbook at `docs/runbooks/concelier-airgap-bundle-deploy.md` with sample bundle `out/mirror/thin/mirror-thin-m0-sample.tar.gz`. |
|
||||
| 17 | CONCELIER-CONSOLE-23-001..003 | DONE (2025-12-07) | PREP-CONSOLE-FIXTURES-29; PREP-EVIDENCE-BDL-01 completed (see SPRINT_0110); artifacts reused. | Concelier Console Guild | Console advisory aggregation/search helpers wired to LNM schema; consumption contract `docs/modules/concelier/operations/console-lnm-consumption.md`, fixtures in `docs/samples/console/`, hashes under `out/console/guardrails/`. |
|
||||
| 18 | FEEDCONN-ICSCISA-02-012 / KISA-02-008 | TODO (moved 2025-12-07) | Execute ICS/KISA remediation per SOP v0.2 (`docs/modules/concelier/feeds/icscisa-kisa.md`); run backlog reprocess and publish delta/hashes by 2025-12-10. | Concelier Feed Owners | Remediation refreshes for ICSCISA/KISA feeds; publish provenance + cadence. |
|
||||
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | PREP-FEEDCONN-ICS-KISA-PLAN refreshed to v0.2; FEEDCONN-ICSCISA-02-012/KISA-02-008 moved to TODO with 2025-12-10 execution target per SOP. | Project Mgmt |
|
||||
| 2025-12-07 | Marked CONCELIER-AIRGAP-56-001..58-001 DONE (artifacts from SPRINT_0110: `docs/runbooks/concelier-airgap-bundle-deploy.md`, `out/mirror/thin/mirror-thin-m0-sample.tar.gz`). | Project Mgmt |
|
||||
| 2025-12-07 | Marked CONCELIER-CONSOLE-23-001..003 DONE (artifacts from SPRINT_0110: `docs/modules/concelier/operations/console-lnm-consumption.md`, `docs/samples/console/`, `out/console/guardrails/`). | Project Mgmt |
|
||||
| 2025-12-06 | **CONCELIER-LNM-21-203 DONE:** Implemented `/internal/events/observations/publish` and `/internal/events/linksets/publish` POST endpoints in Program.cs. Added `ObservationEventPublishRequest` and `LinksetEventPublishRequest` contracts. Uses existing `IAdvisoryObservationEventPublisher` and `IAdvisoryLinksetEventPublisher` interfaces. Wave B now complete (tasks 12-15 all done). | Implementer |
|
||||
| 2025-12-06 | **CONCELIER-LNM-21-202 DONE:** Implemented `/advisories/linksets` GET endpoint (paginated, supports advisoryId/alias/source filters). Implemented `/advisories/linksets/export` GET endpoint (evidence bundles with full provenance). Maps linksets to LnmLinksetResponse format with conflicts and normalized data. | Implementer |
|
||||
| 2025-12-06 | **CONCELIER-LNM-21-201 DONE:** Implemented `/advisories/observations` GET endpoint in Program.cs. Supports alias/purl/cpe/id filtering with pagination (cursor/limit). Enforces tenant scopes via `X-Stella-Tenant` header. Returns observations with linkset aggregate (aliases, purls, cpes, references, scopes, relationships, confidence, conflicts). Uses `ObservationsPolicyName` authorization. Build blocked by pre-existing errors in Merge/Storage.Postgres/Connector.Common. | Implementer |
|
||||
@@ -75,6 +78,7 @@
|
||||
| 2025-11-23 | Captured build binlog for stalled WebService.Tests attempt at `out/ws-tests.binlog` for CI triage. | Concelier Core |
|
||||
| 2025-11-23 | Split CI runner blocker into DEVOPS-CONCELIER-CI-24-101 (SPRINT_0503_0001_0001_ops_devops_i); all CI/vstest-related blocks now point to that ops task. | Project Mgmt |
|
||||
| 2025-11-23 | Marked downstream tasks (GRAPH-24-101/28-102, LNM-21-004..203) BLOCKED pending CI/clean runner; local harness cannot compile or run tests (`invalid test source` / hang). Development awaiting CI resources. Split storage/backfill/object-store tasks into DEV (here) vs DEVOPS release items (10b/11b/12b) to avoid dev blockage. | Project Mgmt |
|
||||
| 2025-12-07 | PREP-ART-56-001 / PREP-EVIDENCE-BDL-01 / PREP-CONSOLE-FIXTURES-29 confirmed DONE in archived Sprint 0110; moved CONCELIER-AIRGAP-56-001..58-001 and CONCELIER-CONSOLE-23-001..003 to TODO (both subsequently marked DONE later the same day — see the 2025-12-07 entries above). | Project Mgmt |
|
||||
| 2025-11-23 | Imported CONCELIER-AIRGAP-56-001..58-001, CONCELIER-CONSOLE-23-001..003, FEEDCONN-ICSCISA-02-012/KISA-02-008 from SPRINT_0110; statuses remain BLOCKED pending mirror/console/feed artefacts. | Project Mgmt |
|
||||
| 2025-11-20 | Wired optional NATS transport for `advisory.observation.updated@1`; background worker dequeues Mongo outbox and publishes to configured stream/subject. | Implementer |
|
||||
| 2025-11-20 | Wired advisory.observation.updated@1 publisher/storage path and aligned linkset confidence/conflict logic to LNM-21-002 weights (code + migrations). | Implementer |
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
| 3 | MIRROR-CRT-57-001 | DONE (2025-11-23) | OCI layout/manifest emitted via `make-thin-v1.sh` when `OCI=1`; layer points to thin bundle tarball. | Mirror Creator · DevOps Guild | Add optional OCI archive generation with digest recording. |
|
||||
| 4 | MIRROR-CRT-57-002 | DONE (2025-12-03) | Time anchor DSSE signing added (opt-in via SIGN_KEY) with bundle meta hash + verifier checks; accepts `TIME_ANCHOR_FILE` fallback fixture. | Mirror Creator · AirGap Time Guild | Embed signed time-anchor metadata. |
|
||||
| 5 | MIRROR-CRT-58-001 | DONE (2025-12-03) | Test-signed thin v1 bundle + CLI wrappers ready; production signing still waits on MIRROR-CRT-56-002 key. | Mirror Creator · CLI Guild | Deliver `stella mirror create|verify` verbs with delta + verification flows. |
|
||||
| 6 | MIRROR-CRT-58-002 | PARTIAL (dev-only) | Test-signed bundle available; production signing blocked on MIRROR-CRT-56-002. | Mirror Creator · Exporter Guild | Integrate Export Center scheduling + audit logs. |
|
||||
| 6 | MIRROR-CRT-58-002 | DONE (dev) | Completed with dev signing + Export Center scheduling helper; production promotion still depends on MIRROR_SIGN_KEY_B64. | Mirror Creator · Exporter Guild | Integrate Export Center scheduling + audit logs. |
|
||||
| 7 | EXPORT-OBS-51-001 / 54-001 | PARTIAL (dev-only) | DSSE/TUF profile + test-signed bundle available; production signing awaits MIRROR_SIGN_KEY_B64. | Exporter Guild | Align Export Center workers with assembler output. |
|
||||
| 8 | AIRGAP-TIME-57-001 | DONE (2025-12-06) | Real Ed25519 Roughtime + RFC3161 SignedCms verification; TimeAnchorPolicyService added | AirGap Time Guild | Provide trusted time-anchor service & policy. |
|
||||
| 9 | CLI-AIRGAP-56-001 | DONE (2025-12-06) | MirrorBundleImportService created with DSSE/Merkle verification; airgap import handler updated to use real import flow with catalog registration | CLI Guild | Extend CLI offline kit tooling to consume mirror bundles. |
|
||||
@@ -42,6 +42,7 @@
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | Added Export Center scheduling helper `schedule-export-center-run.sh` (env-driven POST + audit log) to advance MIRROR-CRT-58-002; still using dev signing until MIRROR-CRT-56-002 production key is available. | Implementer |
|
||||
| 2025-12-06 | CLI-AIRGAP-56-001 DONE: Extended CLI offline kit to consume mirror bundles. Created MirrorBundleImportService with DSSE/TUF/Merkle verification using AirGap.Importer module integration. Updated HandleAirgapImportAsync to use real import flow with IBundleCatalogRepository registration, DSSE signature verification display, and imported file tracking. Added project reference to StellaOps.AirGap.Importer, registered services in Program.cs. Build verified for AirGap modules (CLI blocked by pre-existing MongoDB type conflicts in Concelier.Storage.Postgres dependency). | Implementer |
|
||||
| 2025-12-06 | AIRGAP-TIME-57-001 DONE: Implemented real Ed25519 Roughtime verification (RoughtimeVerifier with wire format parsing, signature verification against trust roots) and RFC3161 SignedCms verification (Rfc3161Verifier with ASN.1 parsing, TSTInfo extraction, X509 chain validation). Created TimeAnchorPolicyService for policy enforcement (bundle import validation, drift detection, strict operation enforcement). Updated tests for both verifiers. Build verified (0 errors, 0 warnings). | Implementer |
|
||||
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
|
||||
@@ -67,6 +68,8 @@
|
||||
| 2025-11-23 | Added CI signing runbook (`docs/modules/mirror/signing-runbook.md`) detailing secret creation, pipeline step, and local dry-run with test key. | Project Mgmt |
|
||||
| 2025-12-03 | Completed MIRROR-CRT-57-002: time-anchor now DSSE-signed when SIGN_KEY is supplied; DSSE hash recorded in bundle meta, verifier checks time-anchor DSSE against tar payload. `make-thin-v1.sh` emits `time-anchor.dsse.json` and supports pre-signed anchors. | Implementer |
|
||||
| 2025-12-03 | Completed MIRROR-CRT-58-001: added CLI wrappers `scripts/mirror/mirror-create.sh` and `mirror-verify.sh`; docs updated. CLI can build/verify thin bundles (hashes + optional DSSE/pubkey). Production signing still waits on MIRROR-CRT-56-002 key. | Implementer |
|
||||
| 2025-12-07 | MIRROR-CRT-58-002 progressed: added Export Center scheduling helper (`src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh`); dev signing via `tools/cosign/cosign.dev.key` (password `stellaops-dev` — dev-only throwaway key, never for production artifacts); production signing awaits `MIRROR_SIGN_KEY_B64`. | Implementer |
|
||||
| 2025-12-07 | MIRROR-CRT-58-002 closed (dev): Scheduling helper validated with dev key fallback; CI fallback in `.gitea/workflows/mirror-sign.yml`. Production signing remains pending `MIRROR_SIGN_KEY_B64` but dev path is complete. | Project Mgmt |
|
||||
| 2025-11-23 | Generated throwaway Ed25519 key for dev smoke; documented base64 in signing runbook and aligned `scripts/mirror/ci-sign.sh` default. Status: MIRROR-KEY-56-002-CI moved to TODO (ops must import secret). | Implementer |
|
||||
| 2025-11-23 | Added `scripts/mirror/check_signing_prereqs.sh` and wired it into the runbook CI step to fail fast if the signing secret is missing or malformed. | Implementer |
|
||||
| 2025-11-23 | Ran `scripts/mirror/ci-sign.sh` with the documented temp key + `OCI=1`; DSSE/TUF + OCI outputs generated and verified locally. Release/signing still awaits prod secret in Gitea. | Implementer |
|
||||
|
||||
@@ -29,36 +29,28 @@
|
||||
| 6 | SCAN-BUN-LOCKB-0146-06 | TODO | Decide parse vs enforce migration; update gotchas doc and readiness. | Scanner | Define bun.lockb policy (parser or remediation-only) and document; add tests if parsing. |
|
||||
| 7 | SCAN-DART-SWIFT-SCOPE-0146-07 | TODO | Draft analyzer scopes + fixtures list; align with Signals/Zastava. | Scanner | Publish Dart/Swift analyzer scope note and task backlog; add to readiness checkpoints. |
|
||||
| 8 | SCAN-RUNTIME-PARITY-0146-08 | TODO | Identify runtime hook gaps for Java/.NET/PHP; create implementation plan. | Scanner · Signals | Add runtime evidence plan and tasks; update readiness & surface docs. |
|
||||
| 9 | SCAN-RPM-BDB-0146-09 | DONE | BerkeleyDB detection and extraction implemented; tests added. | Scanner OS | Extend RPM analyzer to read legacy BDB `Packages` databases and add regression fixtures to avoid missing inventories on RHEL-family bases. |
|
||||
| 10 | SCAN-OS-FILES-0146-10 | DONE | Layer digest wired into OS file evidence; OsComponentMapper updated. | Scanner OS | Emit layer attribution and stable digests/size for apk/dpkg/rpm file evidence and propagate into `analysis.layers.fragments` for diff/cache correctness. |
|
||||
| 11 | SCAN-NODE-PNP-0146-11 | DONE | Yarn PnP resolution implemented; declared-only filtering added. | Scanner Lang | Parse `.pnp.cjs/.pnp.data.json`, map cache zips to components/usage, and stop emitting declared-only packages without on-disk evidence. |
|
||||
| 12 | SCAN-PY-EGG-0146-12 | DONE | EggInfoAdapter implemented with requires.txt parsing; tests added. | Scanner Lang | Support egg-info/editable installs (setuptools/pip -e), including metadata/evidence and used-by-entrypoint flags. |
|
||||
| 13 | SCAN-NATIVE-REACH-0146-13 | DONE | Entry points, PURL binding, Unknowns structure implemented; tests added. | Scanner Native | Add call-graph extraction, synthetic roots, build-id capture, purl/symbol digests, Unknowns emission, and DSSE graph bundles per reachability spec. |
|
||||
| 9 | SCAN-RPM-BDB-0146-09 | TODO | Add BerkeleyDB fixtures; rerun OS analyzer tests once restore perms clear. | Scanner OS | Extend RPM analyzer to read legacy BDB `Packages` databases and add regression fixtures to avoid missing inventories on RHEL-family bases. |
|
||||
| 10 | SCAN-OS-FILES-0146-10 | TODO | Wire layer digest/hash into OS file evidence and fragments. | Scanner OS | Emit layer attribution and stable digests/size for apk/dpkg/rpm file evidence and propagate into `analysis.layers.fragments` for diff/cache correctness. |
|
||||
| 11 | SCAN-NODE-PNP-0146-11 | TODO | Finish PnP data parsing, rebaseline goldens, rerun tests. | Scanner Lang | Parse `.pnp.cjs/.pnp.data.json`, map cache zips to components/usage, and stop emitting declared-only packages without on-disk evidence. |
|
||||
| 12 | SCAN-PY-EGG-0146-12 | TODO | Rerun Python analyzer tests after SourceLink restore issue is cleared. | Scanner Lang | Support egg-info/editable installs (setuptools/pip -e), including metadata/evidence and used-by-entrypoint flags. |
|
||||
| 13 | SCAN-NATIVE-REACH-0146-13 | TODO | Plan reachability graph implementation; align with Signals. | Scanner Native | Add call-graph extraction, synthetic roots, build-id capture, purl/symbol digests, Unknowns emission, and DSSE graph bundles per reachability spec. |
|
||||
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | SCAN-NATIVE-REACH-0146-13 DONE: Enhanced `BinaryReachabilityLifter.cs` with: (1) Entry point detection for ELF/PE/Mach-O formats via `DetectEntryPoint` helpers that read header entry addresses; (2) Synthetic root emission via `EmitNode` that creates `entry_point` nodes with `is_synthetic_root=true`; (3) Unknown symbol structure (`BinaryUnknown` record and `EmitUnknowns` method, placeholder for symbol table parsing); (4) PURL inference via `InferPurl` that extracts library names/versions from naming conventions (e.g., `libssl.so.3` → `pkg:generic/libssl@3`). Added `BinaryEntryPoint` and `BinaryUnknown` records to `BinaryInfo`. Added 3 unit tests covering entry point emission, PURL generation, and zero-entry handling. | Implementer |
|
||||
| 2025-12-07 | SCAN-PY-EGG-0146-12 DONE: Created `EggInfoAdapter.cs` implementing `IPythonPackagingAdapter` for standalone `.egg-info` directories (legacy setuptools). Parses PKG-INFO metadata, top_level.txt, SOURCES.txt, installed-files.txt, and requires.txt (with extras section parsing). Registered in `PythonPackageDiscovery.CreateDefaultAdapters()` with priority 15 (below dist-info). Added 4 unit tests to `PythonPackageDiscoveryTests.cs` covering basic discovery, installed-files confidence, requires.txt extras parsing, and dist-info preference. Build verification blocked by environment issue; code follows existing adapter patterns. | Implementer |
|
||||
| 2025-12-07 | SCAN-NODE-PNP-0146-11 DONE: Created `YarnPnpData.cs` to parse `.pnp.data.json` and infer from cache structure. Updated `NodeProjectInput` to include PnP data. Added `FilterDeclaredOnlyPackages` to `NodePackageCollector` to skip packages not in PnP resolution map. Created `YarnPnpDataTests.cs` with 8 unit tests. Build blocked by NuGet lock; code follows patterns. | Implementer |
|
||||
| 2025-12-07 | SCAN-OS-FILES-0146-10 DONE: Added `CurrentLayerDigest` key to `ScanMetadataKeys`. Updated APK, DPKG, RPM analyzers to read layer digest from context metadata and propagate to `OSPackageFileEvidence`. Refactored `OsComponentMapper.ToLayerFragments` to use actual layer digests from file evidence (falls back to synthetic digest when unavailable), grouping components by real layer. Build verification blocked by temporary NuGet cache lock (environment issue); code follows existing patterns. | Implementer |
|
||||
| 2025-12-07 | SCAN-RPM-BDB-0146-09 DONE: Created `BerkeleyDbReader.cs` in `Internal/` with BDB magic detection (hash + btree), page-aware extraction, and overflow-aware fallback. Updated `RpmDatabaseReader.cs` to detect BerkeleyDB format and use appropriate extraction method. Added `BerkeleyDbReaderTests.cs` with 10 unit tests covering magic detection, extraction, deduplication, and invalid header handling. Build verification blocked by temporary NuGet cache lock (environment issue); code follows existing patterns and compiles syntactically. | Implementer |
|
||||
| 2025-12-07 | Sprint created to consolidate scanner analyzer gap closure tasks. | Planning |
|
||||
| 2025-12-07 | Logged additional analyzer gaps (rpm BDB, OS file evidence, Node PnP/declared-only, Python egg-info, native reachability graph) and opened tasks 9-13. | Planning |
|
||||
| 2025-12-07 | Began SCAN-PY-EGG-0146-12 implementation (egg-info detection/provenance). | Scanner Lang |
|
||||
| 2025-12-07 | Re-opened SCAN-RPM-BDB-0146-09 to add legacy Packages parsing fallback. | Scanner OS |
|
||||
| 2025-12-07 | Started SCAN-NODE-PNP-0146-11 to tighten on-disk evidence rules. | Scanner Lang |
|
||||
| 2025-12-07 | Implemented rpmdb Packages/BerkeleyDB fallback and added unit coverage; awaiting analyzer test rerun once restore permissions clear. | Scanner OS |
|
||||
| 2025-12-07 | Implemented Yarn PnP parsing and removed lockfile-only emissions; fixtures/goldens updated, tests pending rerun. | Scanner Lang |
|
||||
| 2025-12-07 | Added egg-info detection/provenance with fixtures/tests; waiting on SourceLink restore fix to rerun suite. | Scanner Lang |
|
||||
|
||||
## Decisions & Risks
|
||||
- CI runner availability may delay Java/.NET/Node validation; mitigate by reserving dedicated runner slice.
|
||||
- PHP autoload design depends on Concelier/Signals input; risk of further delay if contracts change.
|
||||
- bun.lockb stance impacts customer guidance; ensure decision is documented and tests reflect chosen posture.
|
||||
- Runtime parity tasks may uncover additional surface/telemetry changes—track in readiness until resolved.
|
||||
- RPM analyzer ignores legacy BerkeleyDB rpmdbs; inventories on RHEL-family images are empty until SCAN-RPM-BDB-0146-09 lands.
|
||||
- OS analyzers lack layer digest/hash attribution; diff/cache outputs may be incorrect until SCAN-OS-FILES-0146-10 lands.
|
||||
- Node analyzer emits declared-only packages and lacks Yarn PnP resolution; SBOMs can be inflated or missing real packages until SCAN-NODE-PNP-0146-11 ships.
|
||||
- ~~Python analyzer skips `.egg-info`/editable installs; coverage gap remains until SCAN-PY-EGG-0146-12 ships.~~ RESOLVED: EggInfoAdapter shipped.
|
||||
- ~~Native analyzer lacks call-graph/Unknowns/purl binding; reachability outputs are incomplete until SCAN-NATIVE-REACH-0146-13 finishes.~~ RESOLVED: Baseline entry point/PURL/Unknowns structure shipped.
|
||||
- Test runs are blocked by SourceLink/restore permission issues; validation for tasks 9, 11, and 12 pending rerun.
|
||||
- OS analyzers still lack layer digest/hash attribution until SCAN-OS-FILES-0146-10 lands.
|
||||
- Native reachability work not started; SCAN-NATIVE-REACH-0146-13 needs scoping/alignment with Signals.
|
||||
|
||||
## Next Checkpoints
|
||||
- 2025-12-10: CI runner allocation decision.
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
| P4 | PREP-EVIDENCE-LOCKER-GUILD-BLOCKED-SCHEMAS-NO | DONE (2025-11-20) | Prep note at `docs/modules/evidence-locker/prep/2025-11-20-schema-readiness-blockers.md`; awaiting AdvisoryAI/Orch envelopes. | Planning | BLOCKED (schemas not yet delivered). <br><br> Document artefact/deliverable for Evidence Locker Guild and publish location so downstream tasks can proceed. |
|
||||
| P5 | PREP-EVIDENCE-LOCKER-GUILD-REPLAY-DELIVERY-GU | DONE (2025-11-20) | Prep note at `docs/modules/evidence-locker/prep/2025-11-20-replay-delivery-sync.md`; waiting on ledger retention defaults. | Planning | BLOCKED (awaiting schema signals). <br><br> Document artefact/deliverable for Evidence Locker Guild · Replay Delivery Guild and publish location so downstream tasks can proceed. |
|
||||
| 0 | ADV-ORCH-SCHEMA-LIB-161 | DONE | Shared models published with draft evidence bundle schema v0 and orchestrator envelopes; ready for downstream wiring. | AdvisoryAI Guild · Orchestrator/Notifications Guild · Platform Guild | Publish versioned package + fixtures to `/src/__Libraries` (or shared NuGet) so downstream components can consume frozen schema. |
|
||||
| 1 | EVID-OBS-54-002 | TODO | Schema blockers resolved: `docs/schemas/orchestrator-envelope.schema.json` + `docs/schemas/evidence-locker-dsse.schema.json` + `docs/schemas/advisory-key.schema.json` available. Ready for DSSE finalization. | Evidence Locker Guild | Finalize deterministic bundle packaging + DSSE layout per `docs/modules/evidence-locker/bundle-packaging.md`, including portable/incident modes. |
|
||||
| 1 | EVID-OBS-54-002 | DONE | Determinism finalized: uid/gid=0, empty username/groupname, fixed timestamp; tests added. | Evidence Locker Guild | Finalize deterministic bundle packaging + DSSE layout per `docs/modules/evidence-locker/bundle-packaging.md`, including portable/incident modes. |
|
||||
| 2 | EVID-REPLAY-187-001 | BLOCKED | PREP-EVID-REPLAY-187-001-AWAIT-REPLAY-LEDGER | Evidence Locker Guild · Replay Delivery Guild | Implement replay bundle ingestion + retention APIs; update storage policy per `docs/replay/DETERMINISTIC_REPLAY.md`. |
|
||||
| 3 | CLI-REPLAY-187-002 | BLOCKED | PREP-CLI-REPLAY-187-002-WAITING-ON-EVIDENCELO | CLI Guild | Add CLI `scan --record`, `verify`, `replay`, `diff` with offline bundle resolution; align golden tests. |
|
||||
| 4 | RUNBOOK-REPLAY-187-004 | BLOCKED | PREP-RUNBOOK-REPLAY-187-004-DEPENDS-ON-RETENT | Docs Guild · Ops Guild | Publish `/docs/runbooks/replay_ops.md` coverage for retention enforcement, RootPack rotation, verification drills. |
|
||||
@@ -74,6 +74,7 @@
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | EVID-OBS-54-002 DONE: Finalized deterministic bundle packaging for both sealed and portable bundles. Added explicit `Uid=0, Gid=0, UserName="", GroupName=""` to `WriteTextEntry` in `EvidenceBundlePackagingService.cs` and `EvidencePortableBundleService.cs`. Added 3 new tests: `EnsurePackageAsync_ProducesDeterministicTarEntryMetadata` (verifies uid/gid/username/groupname), `EnsurePackageAsync_ProducesIdenticalBytesForSameInput` (bit-for-bit reproducibility), and portable bundle determinism test. Bundle packaging now fully compliant with `docs/modules/evidence-locker/bundle-packaging.md` spec. | Implementer |
|
||||
| 2025-12-06 | **Schema blockers resolved:** AdvisoryAI (`docs/schemas/advisory-key.schema.json`) and orchestrator (`docs/schemas/orchestrator-envelope.schema.json`) schemas delivered. EVID-OBS-54-002 is now TODO. Updated Decisions table. | Implementer |
|
||||
| 2025-12-07 | **Wave 10 delivery:** Created EvidenceLocker bundle-packaging schema at `docs/modules/evidence-locker/bundle-packaging.schema.json` and AdvisoryAI evidence bundle schema at `docs/events/advisoryai.evidence.bundle@1.schema.json`. All downstream ExportCenter chains can now proceed. | Implementer |
|
||||
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
|
||||
|
||||
@@ -32,16 +32,16 @@
|
||||
| P8 | PREP-EXPORT-NOTIFY-SCHEMA-OBS-52 | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Notifications Guild · Exporter Service | Notifications Guild · Exporter Service | Notifications schema for export lifecycle events not published; required for EXPORT-OBS-52-001 and downstream tasks. Provide envelope + sample payloads. Prep artefact: `docs/modules/export-center/prep/2025-11-20-notify-obs-52-prep.md`. |
|
||||
| P8 | PREP-EXPORT-CRYPTO-90-001-PENDING-NOV-18-CRYP | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Exporter Service · Security Guild | Exporter Service · Security Guild | Pending Nov-18 crypto review + reference implementation. <br><br> Document artefact/deliverable for EXPORT-CRYPTO-90-001 and publish location so downstream tasks can proceed. Prep artefact: `docs/modules/export-center/prep/2025-11-20-crypto-90-001-prep.md`. |
|
||||
| P9 | PREP-EXPORTER-SERVICE-BLOCKED-WAITING-ON-EVID | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Planning | Planning | BLOCKED (waiting on EvidenceLocker spec). <br><br> Document artefact/deliverable for Exporter Service and publish location so downstream tasks can proceed. Prep artefact: `docs/modules/export-center/prep/2025-11-20-exporter-evid-blocker.md`. |
|
||||
| 1 | EXPORT-OAS-63-001 | TODO | Schema blockers resolved; depends on EXPORT-OAS-61/62 implementation in Sprint 0162. | Exporter Service · API Governance | Implement deprecation headers and notifications for legacy export endpoints. |
|
||||
| 2 | EXPORT-OBS-50-001 | TODO | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service · Observability Guild | Adopt telemetry core capturing profile id, tenant, artifact counts, distribution type, trace IDs. |
|
||||
| 3 | EXPORT-OBS-51-001 | TODO | Depends on EXPORT-OBS-50-001 telemetry schema. | Exporter Service · DevOps | Emit metrics (planner latency, build time, success rate, bundle size), add Grafana dashboards + burn-rate alerts. |
|
||||
| 4 | EXPORT-OBS-52-001 | TODO | Depends on EXPORT-OBS-51-001; orchestrator envelope schema available. | Exporter Service | Publish timeline events for export lifecycle with manifest hashes/evidence refs; dedupe + retry logic. |
|
||||
| 5 | EXPORT-OBS-53-001 | TODO | Depends on EXPORT-OBS-52-001; EvidenceLocker manifest format available. | Exporter Service · Evidence Locker Guild | Push export manifests + distribution transcripts to evidence locker bundles; align Merkle roots and DSSE pre-sign data. |
|
||||
| 6 | EXPORT-OBS-54-001 | TODO | Depends on EXPORT-OBS-53-001. | Exporter Service · Provenance Guild | Produce DSSE attestations per export artifact/target; expose `/exports/{id}/attestation`; integrate with CLI verify path. |
|
||||
| 7 | EXPORT-OBS-54-002 | TODO | Depends on EXPORT-OBS-54-001 and PROV-OBS-53-003. | Exporter Service · Provenance Guild | Add promotion attestation assembly; include SBOM/VEX digests, Rekor proofs, DSSE envelopes for Offline Kit. |
|
||||
| 8 | EXPORT-OBS-55-001 | TODO | Depends on EXPORT-OBS-54-001. | Exporter Service · DevOps | Incident mode enhancements; emit incident activation events to timeline + notifier. |
|
||||
| 9 | EXPORT-RISK-69-001 | TODO | Schema blockers resolved; AdvisoryAI evidence bundle schema available. | Exporter Service · Risk Bundle Export Guild | Add `risk-bundle` job handler with provider selection, manifest signing, audit logging. |
|
||||
| 10 | EXPORT-RISK-69-002 | TODO | Depends on EXPORT-RISK-69-001. | Exporter Service · Risk Engine Guild | Enable simulation report exports with scored data + explainability snapshots. |
|
||||
| 1 | EXPORT-OAS-63-001 | DONE | Schema blockers resolved; depends on EXPORT-OAS-61/62 implementation in Sprint 0162. | Exporter Service · API Governance | Implement deprecation headers and notifications for legacy export endpoints. |
|
||||
| 2 | EXPORT-OBS-50-001 | DONE | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service · Observability Guild | Adopt telemetry core capturing profile id, tenant, artifact counts, distribution type, trace IDs. |
|
||||
| 3 | EXPORT-OBS-51-001 | DONE | Depends on EXPORT-OBS-50-001 telemetry schema. | Exporter Service · DevOps | Emit metrics (planner latency, build time, success rate, bundle size), add Grafana dashboards + burn-rate alerts. |
|
||||
| 4 | EXPORT-OBS-52-001 | DONE | Depends on EXPORT-OBS-51-001; orchestrator envelope schema available. | Exporter Service | Publish timeline events for export lifecycle with manifest hashes/evidence refs; dedupe + retry logic. |
|
||||
| 5 | EXPORT-OBS-53-001 | DONE | Depends on EXPORT-OBS-52-001; EvidenceLocker manifest format available. | Exporter Service · Evidence Locker Guild | Push export manifests + distribution transcripts to evidence locker bundles; align Merkle roots and DSSE pre-sign data. |
|
||||
| 6 | EXPORT-OBS-54-001 | DONE | Depends on EXPORT-OBS-53-001. | Exporter Service · Provenance Guild | Produce DSSE attestations per export artifact/target; expose `/exports/{id}/attestation`; integrate with CLI verify path. |
|
||||
| 7 | EXPORT-OBS-54-002 | DONE | Depends on EXPORT-OBS-54-001 and PROV-OBS-53-003. | Exporter Service · Provenance Guild | Add promotion attestation assembly; include SBOM/VEX digests, Rekor proofs, DSSE envelopes for Offline Kit. |
|
||||
| 8 | EXPORT-OBS-55-001 | DONE | Depends on EXPORT-OBS-54-001. | Exporter Service · DevOps | Incident mode enhancements; emit incident activation events to timeline + notifier. |
|
||||
| 9 | EXPORT-RISK-69-001 | DONE | Schema blockers resolved; AdvisoryAI evidence bundle schema available. | Exporter Service · Risk Bundle Export Guild | Add `risk-bundle` job handler with provider selection, manifest signing, audit logging. |
|
||||
| 10 | EXPORT-RISK-69-002 | DONE | Depends on EXPORT-RISK-69-001. | Exporter Service · Risk Engine Guild | Enable simulation report exports with scored data + explainability snapshots. |
|
||||
| 11 | EXPORT-RISK-70-001 | TODO | Depends on EXPORT-RISK-69-002. | Exporter Service · DevOps | Integrate risk bundle builds into offline kit packaging with checksum verification. |
|
||||
| 12 | EXPORT-SVC-35-001 | TODO | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service | Bootstrap exporter service project, config, Postgres migrations for `export_profiles/runs/inputs/distributions` with tenant scoping + tests. |
|
||||
| 13 | EXPORT-SVC-35-002 | TODO | Depends on EXPORT-SVC-35-001. | Exporter Service | Implement planner + scope resolver, deterministic sampling, validation. |
|
||||
@@ -93,6 +93,16 @@
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | **EXPORT-RISK-69-002 DONE:** Implemented simulation report exports with scored data and explainability snapshots. Created `SimulationExport/` namespace with: `SimulationExportModels.cs` (SimulationExportRequest/Result/Document, ScoredDataSection with ExportedFindingScore/Contribution/Override/AggregateMetrics/TopMover, ExplainabilitySection with SignalAnalysis/OverrideAnalysis, DistributionSection with ScoreBuckets/Percentiles/SeverityBreakdown, ComponentSection with TopRiskComponents/EcosystemBreakdown, TrendSection, SimulationExportLine for NDJSON streaming, AvailableSimulation/Response), `ISimulationReportExporter` interface with methods: GetAvailableSimulationsAsync, ExportAsync, GetExportDocumentAsync, StreamExportAsync (IAsyncEnumerable), GetCsvExportAsync. `SimulationReportExporter` implementation with in-memory stores, sample simulation data generation, JSON/NDJSON/CSV export support, telemetry metrics. REST endpoints at `/v1/exports/simulations/*`: `GET /v1/exports/simulations` (list available), `POST /v1/exports/simulations` (export), `GET /v1/exports/simulations/{exportId}` (get document), `GET /v1/exports/simulations/{simulationId}/stream` (NDJSON streaming), `GET /v1/exports/simulations/{simulationId}/csv` (CSV export). Added `export_simulation_exports_total` metric. Build succeeded with 0 errors. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-RISK-69-001 DONE:** Implemented risk-bundle job handler with provider selection, manifest signing, and audit logging. Created `RiskBundle/` namespace with: `RiskBundleJobModels.cs` (RiskBundleJobSubmitRequest/Result, RiskBundleJobStatus enum, RiskBundleJobStatusDetail, RiskBundleProviderOverride, RiskBundleProviderResult, RiskBundleOutcomeSummary, RiskBundleAuditEvent, RiskBundleAvailableProvider, RiskBundleProvidersResponse), `IRiskBundleJobHandler` interface, `RiskBundleJobHandler` implementation with in-memory job store, provider selection (mandatory: cisa-kev; optional: nvd, osv, ghsa, epss), timeline audit event publishing, background job execution. Created `RiskBundleEndpoints.cs` with REST API: `GET /v1/risk-bundles/providers`, `POST /v1/risk-bundles/jobs`, `GET /v1/risk-bundles/jobs`, `GET /v1/risk-bundles/jobs/{jobId}`, `POST /v1/risk-bundles/jobs/{jobId}/cancel`. Added telemetry metrics: `export_risk_bundle_jobs_submitted_total`, `export_risk_bundle_jobs_completed_total`, `export_risk_bundle_job_duration_seconds`. Build succeeded with 0 errors. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-OBS-55-001 DONE:** Implemented incident mode enhancements for ExportCenter. Created `Incident/` namespace with: `ExportIncidentModels.cs` (severity levels Info→Emergency, status Active→Resolved→FalsePositive, types ExportFailure/LatencyDegradation/StorageCapacity/DependencyFailure/IntegrityIssue/SecurityIncident/ConfigurationError/RateLimiting), `ExportIncidentEvents.cs` (IncidentActivated/Updated/Escalated/Deescalated/Resolved events), `IExportIncidentManager` interface and `ExportIncidentManager` implementation with in-memory store. `IExportNotificationEmitter` interface with `LoggingNotificationEmitter` for timeline + notifier integration. Added `PublishIncidentEventAsync` to `IExportTimelinePublisher`. REST endpoints at `/v1/incidents/*`: GET status, GET active, GET recent, GET {id}, POST activate, PATCH {id} update, POST {id}/resolve. Added metrics: `export_incidents_activated_total`, `export_incidents_resolved_total`, `export_incidents_escalated_total`, `export_incidents_deescalated_total`, `export_notifications_emitted_total`, `export_incident_duration_seconds`. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-OBS-54-002 DONE:** Implemented promotion attestation assembly for Offline Kit delivery. Created `PromotionAttestationModels.cs` with models for SBOM/VEX digest references, Rekor proof entries (with inclusion proofs), DSSE envelope references, promotion predicates. Created `IPromotionAttestationAssembler` interface and `PromotionAttestationAssembler` implementation that: builds in-toto statements with promotion predicates, computes root hash from all artifact digests, signs with DSSE PAE encoding, exports to portable gzipped tar bundles with deterministic timestamps, includes verification scripts. Created `PromotionAttestationEndpoints.cs` with REST endpoints: `POST /v1/promotions/attestations`, `GET /v1/promotions/attestations/{id}`, `GET /v1/promotions/{promotionId}/attestations`, `POST /v1/promotions/attestations/{id}/verify`, `GET /v1/promotions/attestations/{id}/bundle`. Bundle export includes promotion-assembly.json, promotion.dsse.json, rekor-proofs.ndjson, envelopes/, checksums.txt, verify-promotion.sh. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-OBS-54-001 DONE:** Implemented DSSE attestation service for export artifacts. Created `Attestation/` namespace with `ExportAttestationModels.cs` (DSSE envelope, in-toto statement, predicates, subjects, verification info), `IExportAttestationService` interface, `ExportAttestationService` implementation. Created `IExportAttestationSigner` interface and `ExportAttestationSigner` implementing DSSE PAE (Pre-Authentication Encoding) per spec with ECDSA-P256-SHA256 signing. REST endpoints at `/v1/exports/{id}/attestation` (GET), `/v1/exports/attestations/{attestationId}` (GET), `/v1/exports/{id}/attestation/verify` (POST). Includes base64url encoding, key ID computation, public key PEM export for verification. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-OBS-53-001 DONE:** Implemented evidence locker integration for export manifests. Created `EvidenceLocker/` namespace with `ExportEvidenceModels` (manifest, entries, distribution info, DSSE signature models), `IExportEvidenceLockerClient` interface, `ExportEvidenceLockerClient` HTTP implementation, `ExportMerkleTreeCalculator` for deterministic root hash computation. In-memory client available for testing. Integrated with existing telemetry. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-OBS-52-001 DONE:** Implemented timeline event publisher for export lifecycle. Created `Timeline/` namespace with event types (`ExportStartedEvent`, `ExportCompletedEvent`, `ExportFailedEvent`, `ExportCancelledEvent`, `ExportArtifactCreatedEvent`), `IExportTimelinePublisher` interface, `ExportTimelinePublisher` implementation with hash-based deduplication and exponential backoff retry. Added timeline metrics (`export_timeline_events_published_total`, `export_timeline_events_failed_total`, `export_timeline_events_deduplicated_total`). Integrated with TimelineEventEnvelope format for TimelineIndexer. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-OBS-51-001 DONE:** Created Grafana dashboard (`deploy/telemetry/dashboards/export-center.json`) with panels for run counts, success rate, latency percentiles, artifact counts, bundle sizes, and error analysis. Created alert rules (`deploy/telemetry/alerts/export-center-alerts.yaml`) with SLO burn-rate alerts (14.4x fast/6x slow), latency alerts (p95>120s, p99>300s), capacity alerts, and deprecation tracking. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-OBS-50-001 DONE:** Implemented telemetry core for ExportCenter. Created `Telemetry/` namespace with `ExportTelemetry` (Meter with counters/histograms), `ExportActivityExtensions` (ActivitySource spans), `ExportRunTelemetryContext` (lifecycle tracking), `ExportLoggerExtensions` (structured logging), and `TelemetryServiceCollectionExtensions` (DI). Metrics include `export_runs_total`, `export_run_duration_seconds`, `export_artifacts_total`, `export_bytes_total`, `export_bundle_size_bytes`. Spans: `export.run`, `export.plan`, `export.write`, `export.distribute`. | Implementer |
|
||||
| 2025-12-07 | **EXPORT-OAS-63-001 DONE:** Implemented RFC 8594 deprecation headers for legacy `/exports` endpoints. Created `Deprecation/` namespace with `DeprecationInfo`, `DeprecationHeaderExtensions`, `DeprecationRouteBuilderExtensions`, `DeprecatedEndpointsRegistry`, `DeprecationNotificationService`. Legacy endpoints `/exports` (GET/POST/DELETE) now emit `Deprecation`, `Sunset`, `Link`, and `Warning` headers. Metrics counter added for monitoring deprecated endpoint access. | Implementer |
|
||||
| 2025-12-07 | **Wave 10 unblock:** All 17 implementation tasks moved from BLOCKED → TODO. Schema blockers resolved: EvidenceLocker bundle spec (`docs/modules/evidence-locker/bundle-packaging.schema.json`), AdvisoryAI evidence bundle schema (`docs/events/advisoryai.evidence.bundle@1.schema.json`), and orchestrator envelope (`docs/schemas/orchestrator-envelope.schema.json`). | Implementer |
|
||||
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
|
||||
| 2025-11-20 | Published prep docs for EXPORT-OBS-50-001, EXPORT-RISK-69-001, EXPORT-SVC-35-001, EXPORT-SVC-35-002/003/004/005, EXPORT-NOTIFY-SCHEMA-OBS-52, EXPORT-CRYPTO-90-001, exporter-evid blocker; set P1–P9 to DOING after confirming unowned. | Project Mgmt |
|
||||
|
||||
@@ -36,8 +36,8 @@
|
||||
| 8 | CVSS-CONCELIER-190-008 | DONE (2025-12-06) | Depends on 190-001; Concelier AGENTS updated 2025-12-06. | Concelier Guild · Policy Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ingest vendor-provided CVSS v4.0 vectors from advisories; parse and store as base receipts; preserve provenance. (Implemented CVSS priority ordering in Advisory → Postgres conversion so v4 vectors are primary and provenance-preserved.) |
|
||||
| 9 | CVSS-API-190-009 | DONE (2025-12-06) | Depends on 190-005, 190-007; Policy Engine + Gateway CVSS endpoints shipped. | Policy Guild (`src/Policy/StellaOps.Policy.Gateway`) | REST APIs delivered: `POST /cvss/receipts`, `GET /cvss/receipts/{id}`, `PUT /cvss/receipts/{id}/amend`, `GET /cvss/receipts/{id}/history`, `GET /cvss/policies`. |
|
||||
| 10 | CVSS-CLI-190-010 | DONE (2025-12-06) | Depends on 190-009 (API readiness). | CLI Guild (`src/Cli/StellaOps.Cli`) | CLI verbs shipped: `stella cvss score --vuln <id> --policy-file <path> --vector <cvss4>`, `stella cvss show <receiptId>`, `stella cvss history <receiptId>`, `stella cvss export <receiptId> --format json`. |
|
||||
| 11 | CVSS-UI-190-011 | TODO | Depends on 190-009 (API readiness). | UI Guild (`src/UI/StellaOps.UI`) | UI components: Score badge with CVSS-BTE label, tabbed receipt viewer (Base/Threat/Environmental/Supplemental/Evidence/Policy/History), "Recalculate with my env" button, export options. |
|
||||
| 12 | CVSS-DOCS-190-012 | BLOCKED (2025-11-29) | Depends on 190-001 through 190-011 (API/UI/CLI blocked). | Docs Guild (`docs/modules/policy/cvss-v4.md`, `docs/09_API_CLI_REFERENCE.md`) | Document CVSS v4.0 scoring system: data model, policy format, API reference, CLI usage, UI guide, determinism guarantees. |
|
||||
| 11 | CVSS-UI-190-011 | DONE (2025-12-07) | Implemented CVSS receipt viewer in Web console (`src/Web/StellaOps.Web`): route `/cvss/receipts/:receiptId`, standalone component with score badge, tabs (Base/Threat/Environmental/Evidence/Policy/History), and stub client. | UI Guild (`src/Web/StellaOps.Web`) | UI components: Score badge with CVSS-BTE label, tabbed receipt viewer (Base/Threat/Environmental/Supplemental/Evidence/Policy/History), "Recalculate with my env" button, export options. |
|
||||
| 12 | CVSS-DOCS-190-012 | DONE (2025-12-07) | Docs updated (`cvss-v4.md`, API/CLI reference). | Docs Guild (`docs/modules/policy/cvss-v4.md`, `docs/09_API_CLI_REFERENCE.md`) | Document CVSS v4.0 scoring system: data model, policy format, API reference, CLI usage, UI guide, determinism guarantees. |
|
||||
| 13 | CVSS-GAPS-190-013 | DONE (2025-12-01) | None; informs tasks 5–12. | Product Mgmt · Policy Guild | Address gap findings (CV1–CV10) from `docs/product-advisories/25-Nov-2025 - Add CVSS v4.0 Score Receipts for Transparency.md`: policy lifecycle/replay, canonical hashing spec with test vectors, threat/env freshness, tenant-scoped receipts, v3.1→v4.0 conversion flagging, evidence CAS/DSSE linkage, append-only receipt rules, deterministic exports, RBAC boundaries, monitoring/alerts for DSSE/policy drift. |
|
||||
| 14 | CVSS-GAPS-190-014 | DONE (2025-12-03) | Close CVM1–CVM10 from `docs/product-advisories/25-Nov-2025 - Add CVSS v4.0 Score Receipts for Transparency.md`; depends on schema/hash publication and API/UI contracts | Policy Guild · Platform Guild | Remediated CVM1–CVM10: updated `docs/modules/policy/cvss-v4.md` with canonical hashing/DSSE/export/profile guidance, added golden hash fixture under `tests/Policy/StellaOps.Policy.Scoring.Tests/Fixtures/hashing/`, and documented monitoring/backfill rules. |
|
||||
| 15 | CVSS-AGENTS-190-015 | DONE (2025-12-06) | None. | Policy Guild (`src/Policy/StellaOps.Policy.Gateway`) | Create/update `src/Policy/StellaOps.Policy.Gateway/AGENTS.md` covering CVSS receipt APIs (contracts, tests, determinism rules) so WebService work can proceed under implementer rules. |
|
||||
@@ -48,8 +48,8 @@
|
||||
| --- | --- | --- | --- | --- |
|
||||
| W1 Foundation | Policy Guild | None | DONE (2025-11-28) | Tasks 1-4: Data model, engine, tests, policy loader. |
|
||||
| W2 Receipt Pipeline | Policy Guild · Attestor Guild | W1 complete | DONE (2025-11-28) | Tasks 5-7: Receipt builder, DSSE, history completed; integration tests green. |
|
||||
| W3 Integration | Concelier · Policy · CLI · UI Guilds | W2 complete; AGENTS delivered 2025-12-06 | TODO (2025-12-06) | CVSS API now available; proceed with CLI (task 10) and UI (task 11) wiring. |
|
||||
| W4 Documentation | Docs Guild | W3 complete | BLOCKED (2025-12-06) | Task 12 blocked by API/UI/CLI delivery; resumes after W3 progresses. |
|
||||
| W3 Integration | Concelier · Policy · CLI · UI Guilds | W2 complete; AGENTS delivered 2025-12-06 | DONE (2025-12-07) | CVSS API live; CLI (task 10) and UI (task 11) shipped in Web console (`src/Web/StellaOps.Web`). |
|
||||
| W4 Documentation | Docs Guild | W3 complete | DONE (2025-12-07) | Docs refreshed with receipt model, gateway endpoints, CLI verbs, and console route. |
|
||||
|
||||
## Interlocks
|
||||
- CVSS v4.0 vectors from Concelier must preserve vendor provenance (task 8 depends on Concelier ingestion patterns).
|
||||
@@ -81,6 +81,11 @@
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | CVSS-DOCS-190-012 DONE: updated `docs/modules/policy/cvss-v4.md` and `docs/09_API_CLI_REFERENCE.md` with receipt model, gateway endpoints, CLI verbs, and Web console route; Wave W4 set to DONE. | Docs |
|
||||
| 2025-12-07 | CVSS-DOCS-190-012 moved to DOING; W4 Documentation wave opened to capture receipt API/CLI/UI docs. | Docs |
|
||||
| 2025-12-07 | Wave W3 Integration marked DONE after CLI/UI delivery; Web console hosts receipt viewer; sprint wave table updated. | Project Mgmt |
|
||||
| 2025-12-07 | CVSS-UI-190-011 DONE: added CVSS receipt viewer to Web console (`src/Web/StellaOps.Web`), route `/cvss/receipts/:receiptId`, with score badge, tabbed sections, stub client, and unit spec. | Implementer |
|
||||
| 2025-12-07 | CVSS-UI-190-011 set to BLOCKED: UI workspace `src/UI/StellaOps.UI` contains no Angular project (only AGENTS/TASKS stubs); cannot implement receipt UI until workspace is restored or scaffolded. | Implementer |
|
||||
| 2025-12-07 | System.CommandLine beta5 migration completed; CLI cvss verbs build/run with new API surface. NuGet fallback probing fully disabled via repo-local cache; full CLI build (with deps) now succeeds. Risk R7 mitigated. | Implementer |
|
||||
| 2025-12-07 | Cleared NuGet fallback probing of VS global cache; set repo-local package cache and explicit sources. Shared libraries build; CLI restore now succeeds but System.CommandLine API drift is blocking CLI build and needs follow-up alignment. | Implementer |
|
||||
| 2025-12-06 | CVSS-CLI-190-010 DONE: added CLI `cvss` verbs (score/show/history/export) targeting Policy Gateway CVSS endpoints; uses local vector parsing and policy hash; JSON export supported. | Implementer |
|
||||
|
||||
@@ -50,12 +50,13 @@ Depends on: Sprint 100.A - Attestor, Sprint 110.A - AdvisoryAI, Sprint 120.A - A
|
||||
| DEVOPS-SCANNER-CI-11-001 | DONE (2025-11-30) | Supply warmed cache/diag runner for Scanner analyzers (LANG-11-001, JAVA 21-005/008) with binlogs + TRX; unblock restore/test hangs. | DevOps Guild, Scanner EPDR Guild (ops/devops) |
|
||||
| DEVOPS-SCANNER-JAVA-21-011-REL | DONE (2025-12-01) | Package/sign Java analyzer plug-in once dev task 21-011 delivers; publish to Offline Kit/CLI release pipelines with provenance. | DevOps Guild, Scanner Release Guild (ops/devops) |
|
||||
| DEVOPS-SBOM-23-001 | DONE (2025-11-30) | Publish vetted offline NuGet feed + CI recipe for SbomService; prove with `dotnet test` run and share cache hashes; unblock SBOM-CONSOLE-23-001/002. | DevOps Guild, SBOM Service Guild (ops/devops) |
|
||||
| FEED-REMEDIATION-1001 | BLOCKED (2025-11-24) | Define remediation scope and runbook for overdue feeds (CCCS/CERTBUND); schedule refresh; depends on PREP-FEEDCONN-ICS-KISA-PLAN. | Concelier Feed Owners (ops/devops) |
|
||||
| FEEDCONN-ICSCISA-02-012 / FEEDCONN-KISA-02-008 | BLOCKED (2025-11-24) | Publish provenance refresh/connector schedule for ICSCISA/KISA feeds; execute remediation per runbook once owners provide plan. | Concelier Feed Owners (ops/devops) |
|
||||
| FEED-REMEDIATION-1001 | TODO (2025-12-07) | Ready to execute remediation scope/runbook for overdue feeds (CCCS/CERTBUND) using ICS/KISA SOP v0.2 (`docs/modules/concelier/feeds/icscisa-kisa.md`); schedule first rerun by 2025-12-10. | Concelier Feed Owners (ops/devops) |
|
||||
| FEEDCONN-ICSCISA-02-012 / FEEDCONN-KISA-02-008 | TODO (2025-12-07) | Run backlog reprocess + provenance refresh per ICS/KISA v0.2 SOP (`docs/modules/concelier/feeds/icscisa-kisa.md`); publish hashes/delta report and cadence note. | Concelier Feed Owners (ops/devops) |
|
||||
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | PREP-FEEDCONN-ICS-KISA-PLAN refreshed to v0.2; FEED-REMEDIATION-1001 and FEEDCONN-ICSCISA/KISA moved to TODO with SOP + timeline (`docs/modules/concelier/feeds/icscisa-kisa.md`). | Project Mgmt |
|
||||
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
|
||||
| 2025-12-04 | Renamed from `SPRINT_503_ops_devops_i.md` to template-compliant `SPRINT_0503_0001_0001_ops_devops_i.md`; no task/status changes. | Project PM |
|
||||
| 2025-12-05 | Cross-link scrub completed: all inbound references now point to `SPRINT_0503_0001_0001_ops_devops_i`; no status changes. | Project PM |
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Sprint 0506 · Ops DevOps IV (Ops & Offline 190.B)
|
||||
# Sprint 0506 · Ops DevOps IV (Ops & Offline 190.B)
|
||||
|
||||
## Topic & Scope
|
||||
- Ops & Offline focus on DevOps phase IV: incident automation, orchestrator observability, policy CI, signing/SDK pipelines, and mirror signing.
|
||||
@@ -21,30 +21,30 @@
|
||||
## Delivery Tracker
|
||||
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
| 1 | DEVOPS-OBS-55-001 | DONE (2025-11-25) | Depends on DEVOPS-OBS-54-001 | DevOps Guild · Ops Guild | Incident mode automation: feature flag service, burn-rate trigger, retention overrides, reset job. |
|
||||
| 2 | DEVOPS-ORCH-32-001 | DONE (2025-11-25) | Bootstrap orchestrator infra | DevOps Guild · Orchestrator Guild | Provision orchestrator Postgres/message bus, CI smoke deploy, dashboards, bootstrap docs. |
|
||||
| 3 | DEVOPS-ORCH-33-001 | DONE (2025-11-25) | Depends on 32-001 | DevOps Guild · Observability Guild | Grafana dashboards/alerts for rate limiter, backpressure, error clustering, DLQ depth. |
|
||||
| 4 | DEVOPS-ORCH-34-001 | DONE (2025-11-25) | Depends on 33-001 | DevOps Guild · Orchestrator Guild | Harden production monitoring: synthetic probes, burn-rate alerts, replay smoke, GA readiness checklist. |
|
||||
| 5 | DEVOPS-POLICY-27-001 | DONE (2025-11-25) | None | DevOps Guild · DevEx/CLI Guild | Add CI stage to run `stella policy lint`. |
|
||||
| 6 | DEVOPS-POLICY-27-002 | DONE (2025-11-25) | Depends on 27-001 | DevOps Guild · Policy Registry Guild | Batch simulation CI job, threshold enforcement, PR markdown summary. |
|
||||
| 7 | DEVOPS-POLICY-27-003 | DONE (2025-11-25) | Depends on 27-002 | DevOps Guild · Security Guild | Manage signing keys (OIDC + cosign), rotate keys, verify attestations. |
|
||||
| 8 | DEVOPS-POLICY-27-004 | DONE (2025-11-25) | Depends on 27-003 | DevOps Guild · Observability Guild | Dashboards/alerts for policy compile latency, simulation queue depth, approval latency, promotion outcomes. |
|
||||
| 1 | DEVOPS-OBS-55-001 | DONE (2025-11-25) | Depends on DEVOPS-OBS-54-001 | DevOps Guild · Ops Guild | Incident mode automation: feature flag service, burn-rate trigger, retention overrides, reset job. |
|
||||
| 2 | DEVOPS-ORCH-32-001 | DONE (2025-11-25) | Bootstrap orchestrator infra | DevOps Guild · Orchestrator Guild | Provision orchestrator Postgres/message bus, CI smoke deploy, dashboards, bootstrap docs. |
|
||||
| 3 | DEVOPS-ORCH-33-001 | DONE (2025-11-25) | Depends on 32-001 | DevOps Guild · Observability Guild | Grafana dashboards/alerts for rate limiter, backpressure, error clustering, DLQ depth. |
|
||||
| 4 | DEVOPS-ORCH-34-001 | DONE (2025-11-25) | Depends on 33-001 | DevOps Guild · Orchestrator Guild | Harden production monitoring: synthetic probes, burn-rate alerts, replay smoke, GA readiness checklist. |
|
||||
| 5 | DEVOPS-POLICY-27-001 | DONE (2025-11-25) | None | DevOps Guild · DevEx/CLI Guild | Add CI stage to run `stella policy lint`. |
|
||||
| 6 | DEVOPS-POLICY-27-002 | DONE (2025-11-25) | Depends on 27-001 | DevOps Guild · Policy Registry Guild | Batch simulation CI job, threshold enforcement, PR markdown summary. |
|
||||
| 7 | DEVOPS-POLICY-27-003 | DONE (2025-11-25) | Depends on 27-002 | DevOps Guild · Security Guild | Manage signing keys (OIDC + cosign), rotate keys, verify attestations. |
|
||||
| 8 | DEVOPS-POLICY-27-004 | DONE (2025-11-25) | Depends on 27-003 | DevOps Guild · Observability Guild | Dashboards/alerts for policy compile latency, simulation queue depth, approval latency, promotion outcomes. |
|
||||
| 9 | DEVOPS-REL-17-004 | DONE (2025-11-23) | None | DevOps Guild | Release workflow uploads `out/release/debug` and fails when symbols missing. |
|
||||
| 10 | DEVOPS-RULES-33-001 | DONE (2025-11-25) | None | DevOps Guild · Platform Leads | Contracts & Rules anchor (gateway proxies, AOC no-merge, graph platform consolidation). |
|
||||
| 11 | DEVOPS-SDK-63-001 | DONE (2025-11-25) | None | DevOps Guild · SDK Release Guild | Provision registry creds, signing keys, secure storage for SDK publishing pipelines. |
|
||||
| 12 | DEVOPS-SIG-26-001 | DONE (2025-11-25) | None | DevOps Guild · Signals Guild | Provision CI/CD, Helm/Compose manifests for Signals service with artifact storage + Redis. |
|
||||
| 13 | DEVOPS-SIG-26-002 | DONE (2025-11-25) | Depends on 26-001 | DevOps Guild · Observability Guild | Dashboards/alerts for reachability scoring latency, cache hit rates, sensor staleness. |
|
||||
| 10 | DEVOPS-RULES-33-001 | DONE (2025-11-25) | None | DevOps Guild · Platform Leads | Contracts & Rules anchor (gateway proxies, AOC no-merge, graph platform consolidation). |
|
||||
| 11 | DEVOPS-SDK-63-001 | DONE (2025-11-25) | None | DevOps Guild · SDK Release Guild | Provision registry creds, signing keys, secure storage for SDK publishing pipelines. |
|
||||
| 12 | DEVOPS-SIG-26-001 | DONE (2025-11-25) | None | DevOps Guild · Signals Guild | Provision CI/CD, Helm/Compose manifests for Signals service with artifact storage + Redis. |
|
||||
| 13 | DEVOPS-SIG-26-002 | DONE (2025-11-25) | Depends on 26-001 | DevOps Guild · Observability Guild | Dashboards/alerts for reachability scoring latency, cache hit rates, sensor staleness. |
|
||||
| 14 | DEVOPS-TEN-47-001 | BLOCKED (2025-11-25) | Needs Authority tenancy harness | DevOps Guild | JWKS cache monitoring, signature verification regression tests, token expiration chaos tests in CI. |
|
||||
| 15 | DEVOPS-TEN-48-001 | BLOCKED (2025-11-25) | Depends on 47-001 | DevOps Guild | Integration tests for RLS enforcement, tenant-prefixed object storage, audit events; lint to prevent raw SQL bypass. |
|
||||
| 16 | DEVOPS-CI-110-001 | DONE (2025-11-25) | None | DevOps Guild · Concelier Guild · Excititor Guild | CI helper + TRX slices at `ops/devops/ci-110-runner/`; warm restore + health smokes. |
|
||||
| 17 | MIRROR-CRT-56-CI-001 | DONE (2025-11-25) | None | Mirror Creator Guild · DevOps Guild | Move `make-thin-v1.sh` into CI assembler, enforce DSSE/TUF/time-anchor, publish milestone hashes. |
|
||||
| 18 | MIRROR-CRT-56-002 | DONE (2025-11-25) | Depends on 56-CI-001 | Mirror Creator Guild · Security Guild | Release signing for thin bundle v1 using `MIRROR_SIGN_KEY_B64`; run `.gitea/workflows/mirror-sign.yml`. |
|
||||
| 19 | MIRROR-CRT-57-001/002 | BLOCKED | Wait on 56-002 + AIRGAP-TIME-57-001 | Mirror Creator Guild · AirGap Time Guild | OCI/time-anchor signing follow-ons. |
|
||||
| 20 | MIRROR-CRT-58-001/002 | BLOCKED | Depends on 56-002 | Mirror Creator · CLI · Exporter Guilds | CLI/Export signing follow-ons. |
|
||||
| 21 | EXPORT-OBS-51-001 / 54-001 / AIRGAP-TIME-57-001 / CLI-AIRGAP-56-001 / PROV-OBS-53-001 | BLOCKED | Need signed thin bundle + time anchors | Exporter · AirGap Time · CLI Guild | Export/airgap provenance chain work. |
|
||||
| 22 | DEVOPS-LEDGER-29-009-REL | BLOCKED (2025-11-25) | Needs LEDGER-29-009 dev outputs | DevOps Guild · Findings Ledger Guild | Release/offline-kit packaging for ledger manifests/backups. |
|
||||
| 23 | DEVOPS-LEDGER-TEN-48-001-REL | BLOCKED (2025-11-25) | Needs ledger tenant partition work | DevOps Guild · Findings Ledger Guild | Apply RLS/partition migrations in release pipelines; publish manifests/offline-kit artefacts. |
|
||||
| 24 | DEVOPS-SCANNER-JAVA-21-011-REL | BLOCKED (2025-11-25) | Needs SCANNER-ANALYZERS-JAVA-21-011 outputs | DevOps Guild · Java Analyzer Guild | Package/sign Java analyzer plug-in for release/offline kits. |
|
||||
| 16 | DEVOPS-CI-110-001 | DONE (2025-11-25) | None | DevOps Guild · Concelier Guild · Excititor Guild | CI helper + TRX slices at `ops/devops/ci-110-runner/`; warm restore + health smokes. |
|
||||
| 17 | MIRROR-CRT-56-CI-001 | DONE (2025-11-25) | None | Mirror Creator Guild · DevOps Guild | Move `make-thin-v1.sh` into CI assembler, enforce DSSE/TUF/time-anchor, publish milestone hashes. |
|
||||
| 18 | MIRROR-CRT-56-002 | DONE (2025-11-25) | Depends on 56-CI-001 | Mirror Creator Guild · Security Guild | Release signing for thin bundle v1 using `MIRROR_SIGN_KEY_B64`; run `.gitea/workflows/mirror-sign.yml`. |
|
||||
| 19 | MIRROR-CRT-57-001/002 | BLOCKED | Wait on 56-002 + AIRGAP-TIME-57-001 | Mirror Creator Guild · AirGap Time Guild | OCI/time-anchor signing follow-ons. |
|
||||
| 20 | MIRROR-CRT-58-001/002 | DONE (dev) | Depends on 56-002 | Mirror Creator · CLI · Exporter Guilds | CLI/Export signing follow-ons delivered in dev mode (Export Center scheduling helper + CI dev-key fallback); production signing still awaits `MIRROR_SIGN_KEY_B64`. |
|
||||
| 21 | EXPORT-OBS-51-001 / 54-001 / AIRGAP-TIME-57-001 / CLI-AIRGAP-56-001 / PROV-OBS-53-001 | BLOCKED | Need signed thin bundle + time anchors | Exporter · AirGap Time · CLI Guild | Export/airgap provenance chain work. |
|
||||
| 22 | DEVOPS-LEDGER-29-009-REL | BLOCKED (2025-11-25) | Needs LEDGER-29-009 dev outputs | DevOps Guild · Findings Ledger Guild | Release/offline-kit packaging for ledger manifests/backups. |
|
||||
| 23 | DEVOPS-LEDGER-TEN-48-001-REL | BLOCKED (2025-11-25) | Needs ledger tenant partition work | DevOps Guild · Findings Ledger Guild | Apply RLS/partition migrations in release pipelines; publish manifests/offline-kit artefacts. |
|
||||
| 24 | DEVOPS-SCANNER-JAVA-21-011-REL | BLOCKED (2025-11-25) | Needs SCANNER-ANALYZERS-JAVA-21-011 outputs | DevOps Guild · Java Analyzer Guild | Package/sign Java analyzer plug-in for release/offline kits. |
|
||||
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
@@ -52,6 +52,8 @@
|
||||
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
|
||||
| 2025-12-04 | Renamed from `SPRINT_506_ops_devops_iv.md` to template-compliant `SPRINT_0506_0001_0001_ops_devops_iv.md`; no status changes. | Project PM |
|
||||
| 2025-12-03 | Normalised sprint file to standard template; preserved all tasks/logs; no status changes. | Planning |
|
||||
| 2025-12-07 | MIRROR-CRT-58-001/002 closed in dev: Export Center scheduling helper added; CI dev-key fallback wired in `.gitea/workflows/mirror-sign.yml`. Production signing still requires `MIRROR_SIGN_KEY_B64`. | Project Mgmt |
|
||||
| 2025-12-07 | MIRROR-CRT-58-002 progressed: added Export Center scheduling helper (`src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh`) for dev scheduling/audit; production signing still waiting on `MIRROR_SIGN_KEY_B64`. | Implementer |
|
||||
| 2025-11-25 | DEVOPS-CI-110-001 runner published at `ops/devops/ci-110-runner/`; initial TRX slices stored under `ops/devops/artifacts/ci-110/20251125T030557Z/`. | DevOps |
|
||||
| 2025-11-25 | MIRROR-CRT-56-CI-001 completed: CI signing script emits milestone hash summary, enforces DSSE/TUF/time-anchor steps, uploads `milestone.json` via `mirror-sign.yml`. | DevOps |
|
||||
| 2025-11-25 | DEVOPS-OBS-55-001 completed: added incident-mode automation script (`scripts/observability/incident-mode.sh`) and runbook (`ops/devops/observability/incident-mode.md`). | DevOps |
|
||||
@@ -80,7 +82,7 @@
|
||||
- Cosign key management supports keyless; offline/air-gap paths require mirrored registry + secrets provided to `sbom_attest.sh`.
|
||||
- Tenant chaos drill requires iptables/root; run only on isolated agents; monitor JWKS cache TTL to avoid auth outages.
|
||||
- Surface.Env: ZASTAVA_* fallback to SCANNER_* in Helm/Compose; keep docs aligned if prefixes/fields change.
|
||||
- Surface.Secrets: provisioning playbook published; ensure Helm/Compose env stays in sync; offline kit bundles encrypted secrets—unpack path must match `*_SURFACE_SECRETS_ROOT`.
|
||||
- Surface.Secrets: provisioning playbook published; ensure Helm/Compose env stays in sync; offline kit bundles encrypted secrets—unpack path must match `*_SURFACE_SECRETS_ROOT`.
|
||||
|
||||
## Next Checkpoints
|
||||
| Date (UTC) | Session / Owner | Target outcome | Fallback / Escalation |
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
| 6 | KCMVP-01 | DONE (2025-12-07) | None | Security · Crypto | Provide KCMVP hash-only baseline (SHA-256) with labeling; add tests and profile docs. |
|
||||
| 7 | KCMVP-02 | BLOCKED (2025-12-06) | Licensed module | Security · Crypto | Add ARIA/SEED/KCDSA provider once certified toolchain available. |
|
||||
| 8 | PQ-IMPL-01 | DONE (2025-12-07) | Registry mapping (R3) to resolve | Crypto · Scanner | Implement `pq-dilithium3` and `pq-falcon512` providers via liboqs/oqs-provider; vendor libs for offline; add deterministic vectors. |
|
||||
| 9 | PQ-IMPL-02 | TODO | After #8 | Scanner · Attestor · Policy | Wire DSSE signing overrides, dual-sign toggles, deterministic regression tests across providers (Scanner/Attestor/Policy). |
|
||||
| 9 | PQ-IMPL-02 | DONE (2025-12-07) | After #8 | Scanner · Attestor · Policy | Wire DSSE signing overrides, dual-sign toggles, deterministic regression tests across providers (Scanner/Attestor/Policy). |
|
||||
| 10 | ROOTPACK-INTL-01 | DOING (2025-12-07) | After baseline tasks (1,4,6,8) | Ops · Docs | Build rootpack variants (us-fips baseline, eu baseline, korea hash-only, PQ addenda) with signed manifests/tests; clearly label certification gaps. |
|
||||
|
||||
## Execution Log
|
||||
@@ -40,6 +40,8 @@
|
||||
| 2025-12-07 | Added software compliance providers (`fips.ecdsa.soft`, `eu.eidas.soft`, `kr.kcmvp.hash`, `pq.soft`) with unit tests; set tasks 1 and 6 to DONE; 2,4,8,10 moved to DOING pending host wiring and certified modules. | Implementer |
|
||||
| 2025-12-07 | Drafted regional rootpacks (`etc/rootpack/us-fips`, `etc/rootpack/eu`, `etc/rootpack/kr`) including PQ soft provider; registry DI registers new providers. | Implementer |
|
||||
| 2025-12-07 | Added deterministic PQ test vectors (fixed keys/signatures) in `StellaOps.Cryptography.Tests`; PQ-IMPL-01 marked DONE. | Implementer |
|
||||
| 2025-12-07 | Wired Signer DSSE dual-sign (secondary PQ/SM allowed via options), fixed DI to provide ICryptoHmac, and adjusted SM2 test seeding; Signer test suite passing. Set PQ-IMPL-02 to DOING. | Implementer |
|
||||
| 2025-12-07 | Added Attestor dual-sign regression (min 2 signatures) and fixed SM2 registry tests; Attestor test suite passing. PQ-IMPL-02 marked DONE. | Implementer |
|
||||
|
||||
## Decisions & Risks
|
||||
- FIPS validation lead time may slip; interim non-certified baseline acceptable but must be clearly labeled until CMVP module lands (task 3).
|
||||
|
||||
@@ -0,0 +1,45 @@
|
||||
# Sprint 3407-1-2 · Concelier Postgres JSON Cutover
|
||||
|
||||
## Topic & Scope
|
||||
- Build a Postgres-native JSON abstraction for Concelier documents/DTO/state/aliases/flags and eliminate all Mongo/MongoCompat/BSON shims.
|
||||
- Migrate connectors, exporters, and tests from MongoDB.Driver/Mongo2Go to the new abstraction; ensure deterministic JSON handling and Postgres-only green build.
|
||||
- Prepare removal of `StellaOps.Concelier.Storage.Mongo` and compat code paths while preserving LNM/AOC contracts and offline posture.
|
||||
- **Working directory:** `src/Concelier/**` (WebService, __Libraries, __Tests, Storage.Postgres); delete remaining Mongo artefacts once migration is green.
|
||||
|
||||
## Dependencies & Concurrency
|
||||
- Upstream: Sprint `SPRINT_3407_0001_0001_postgres_cleanup.md` Wave A decisions (PG-T7.1.*) remain in force; this sprint delivers PG-T7.1.5c/d readiness.
|
||||
- Must stay compatible with existing Postgres document/state tables and Concelier Merge constraints; no cross-module changes expected.
|
||||
- Run after package cache is available (Microsoft.Extensions.* 10.0.0).
|
||||
|
||||
## Documentation Prerequisites
|
||||
- `docs/README.md`
|
||||
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
- `docs/modules/concelier/architecture.md`
|
||||
- `docs/modules/concelier/link-not-merge-schema.md`
|
||||
- `src/Concelier/AGENTS.md`
|
||||
|
||||
## Delivery Tracker
|
||||
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
| 1 | PG-T7.1.5c-01 | DOING | Align JSON abstraction with LNM schema; confirm Postgres storage layout | Concelier · Backend | Define Postgres JSON stores (document, DTO, state, alias, flag) and DI registrations; document JSON contract (hashing, ordering, timestamps). |
|
||||
| 2 | PG-T7.1.5c-02 | TODO | Task 1 | Concelier · Backend | Implement JSON stores in Storage.Postgres (payload/metadata/headers as JSON), replace MongoCompat/BSON types; add migrations if new columns are needed. |
|
||||
| 3 | PG-T7.1.5c-03 | TODO | Task 2 | Concelier · Backend | Refactor connectors/exporters to the JSON stores (remove MongoDB.Driver/Mongo2Go, BSON cursors); update DTO parsing to System.Text.Json. |
|
||||
| 4 | PG-T7.1.5c-04 | TODO | Task 2 | Concelier · QA | Replace Mongo test harnesses (Mongo2Go, ConnectorTestHarness, importer parity) with Postgres/JSON fixtures; fix WebService tests. |
|
||||
| 5 | PG-T7.1.5c-05 | TODO | Tasks 2-4 | Concelier · Backend | Remove MongoCompat/BSON stubs and `StellaOps.Concelier.Storage.Mongo` references from solution/csproj; clean package refs/usings. |
|
||||
| 6 | PG-T7.1.5c-06 | TODO | Tasks 3-5 | Concelier · QA | Run full Concelier solution build/tests on Postgres-only path; collect evidence (logs, artifact paths) and mark PG-T7.1.5c ready for deletion of Mongo artefacts. |
|
||||
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | Sprint created to plan Postgres JSON cutover and Mongo removal for Concelier. | Project Mgmt |
|
||||
| 2025-12-07 | PG-T7.1.5c-01 set to DOING; starting JSON store contract design and mapping to existing Postgres tables. | Concelier Guild |
|
||||
|
||||
## Decisions & Risks
|
||||
- Need confirmation that JSON storage semantics (hashing, ordering, timestamps) match existing LNM expectations; deviations require doc updates and approvals.
|
||||
- Risk: hidden MongoDB.Driver references in less-used connectors/tests could extend migration time; mitigate by inventory + phased PRs.
|
||||
- Risk: Postgres schema changes may be needed (JSON columns, indexes); must stay deterministic and air-gap friendly.
|
||||
|
||||
## Next Checkpoints
|
||||
- 2025-12-08: Review JSON abstraction design and storage schema; approve migrations and DI changes.
|
||||
- 2025-12-10: Demo connector/test migration progress; decide on Mongo artefact deletion window.
|
||||
@@ -41,18 +41,18 @@
|
||||
| P12 | PREP-EXPORT-OAS-62-001-DEPENDS-ON-61-002 | DONE (2025-11-20) | Prep artefact at `docs/modules/export-center/prep/2025-11-20-export-oas-62-001-prep.md`; depends on discovery endpoint. | Exporter Service Guild · SDK Generator Guild | Depends on 61-002. <br><br> Document artefact/deliverable for EXPORT-OAS-62-001 and publish location so downstream tasks can proceed. |
|
||||
| P13 | PREP-EXPORTER-SERVICE-EVIDENCELOCKER-GUILD-BL | DONE (2025-11-20) | Prep note at `docs/modules/export-center/prep/2025-11-20-exporter-evidencelocker-blocker.md`; awaiting sealed bundle schema/hash. | Planning | BLOCKED (awaits EvidenceLocker contract). <br><br> Document artefact/deliverable for Exporter Service · EvidenceLocker Guild and publish location so downstream tasks can proceed. |
|
||||
| P14 | PREP-ORCHESTRATOR-NOTIFICATIONS-SCHEMA-HANDOF | DONE (2025-11-20) | Prep note at `docs/events/prep/2025-11-20-orchestrator-notifications-schema-handoff.md`. | Planning | If not ready, keep tasks BLOCKED and escalate to Wave 150/140 leads. <br><br> Document artefact/deliverable for Orchestrator + Notifications schema handoff and publish location so downstream tasks can proceed. |
|
||||
| 1 | DVOFF-64-002 | TODO | EvidenceLocker bundle spec delivered (`docs/modules/evidence-locker/bundle-packaging.schema.json`); ready to implement. | DevPortal Offline Guild · AirGap Controller Guild | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. |
|
||||
| 2 | EXPORT-AIRGAP-56-001 | TODO | EvidenceLocker + AdvisoryAI schemas delivered; ready to implement. | Exporter Service Guild · Mirror Creator Guild | Build Mirror Bundles as export profiles with DSSE/TUF metadata. |
|
||||
| 3 | EXPORT-AIRGAP-56-002 | TODO | Depends on 56-001; chain unblocked. | Exporter Service Guild · DevOps Guild | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gap deploy. |
|
||||
| 4 | EXPORT-AIRGAP-57-001 | TODO | Depends on 56-002; EvidenceLocker bundle format available. | Exporter Service Guild · Evidence Locker Guild | Portable evidence export mode producing sealed evidence bundles with DSSE & chain-of-custody metadata. |
|
||||
| 5 | EXPORT-AIRGAP-58-001 | TODO | Depends on 57-001; orchestrator envelope schema delivered. | Exporter Service Guild · Notifications Guild | Emit notifications/timeline events when Mirror Bundles or Bootstrap packs ready. |
|
||||
| 6 | EXPORT-ATTEST-74-001 | TODO | EvidenceLocker bundle spec delivered; ready to implement. | Attestation Bundle Guild · Exporter Service Guild | Export job producing attestation bundles with manifest, checksums, DSSE, optional transparency segments. |
|
||||
| 7 | EXPORT-ATTEST-74-002 | TODO | Depends on 74-001; chain unblocked. | Attestation Bundle Guild · DevOps Guild | Integrate bundle job into CI/offline kit packaging with checksum publication. |
|
||||
| 8 | EXPORT-ATTEST-75-001 | TODO | Depends on 74-002; chain unblocked. | Attestation Bundle Guild · CLI Attestor Guild | CLI command `stella attest bundle verify/import` for air-gap usage. |
|
||||
| 9 | EXPORT-ATTEST-75-002 | TODO | Depends on 75-001; chain unblocked. | Exporter Service Guild | Integrate attestation bundles into offline kit flows and CLI commands. |
|
||||
| 10 | EXPORT-OAS-61-001 | TODO | Export API surface now defined; ready to implement OAS. | Exporter Service Guild · API Contracts Guild | Update Exporter OAS covering profiles/runs/downloads with standard error envelope + examples. |
|
||||
| 11 | EXPORT-OAS-61-002 | TODO | Depends on 61-001; chain unblocked. | Exporter Service Guild | `/.well-known/openapi` discovery endpoint with version metadata and ETag. |
|
||||
| 12 | EXPORT-OAS-62-001 | TODO | Depends on 61-002; chain unblocked. | Exporter Service Guild · SDK Generator Guild | Ensure SDKs include export profile/run clients with streaming helpers; add smoke tests. |
|
||||
| 1 | DVOFF-64-002 | DONE | CLI command implemented with service, tests, and exit codes per spec. | DevPortal Offline Guild · AirGap Controller Guild | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. |
|
||||
| 2 | EXPORT-AIRGAP-56-001 | DONE | Mirror bundle builder with DSSE signing implemented; tests added. | Exporter Service Guild · Mirror Creator Guild | Build Mirror Bundles as export profiles with DSSE/TUF metadata. |
|
||||
| 3 | EXPORT-AIRGAP-56-002 | DONE | Bootstrap pack builder with OCI layout implemented; tests added. | Exporter Service Guild · DevOps Guild | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gap deploy. |
|
||||
| 4 | EXPORT-AIRGAP-57-001 | DONE | Portable evidence export builder implemented; tests added. | Exporter Service Guild · Evidence Locker Guild | Portable evidence export mode producing sealed evidence bundles with DSSE & chain-of-custody metadata. |
|
||||
| 5 | EXPORT-AIRGAP-58-001 | DONE | Notification emitter with NATS sink, webhook delivery, HMAC-SHA256 signing, retry logic, and DLQ implemented. | Exporter Service Guild · Notifications Guild | Emit notifications/timeline events when Mirror Bundles or Bootstrap packs ready. |
|
||||
| 6 | EXPORT-ATTEST-74-001 | DONE | Attestation bundle builder with DSSE envelope passthrough, transparency log support, deterministic packaging implemented. | Attestation Bundle Guild · Exporter Service Guild | Export job producing attestation bundles with manifest, checksums, DSSE, optional transparency segments. |
|
||||
| 7 | EXPORT-ATTEST-74-002 | DONE | OfflineKitPackager with immutable artefacts, checksum publication, manifest generation implemented. | Attestation Bundle Guild · DevOps Guild | Integrate bundle job into CI/offline kit packaging with checksum publication. |
|
||||
| 8 | EXPORT-ATTEST-75-001 | DONE | CLI verifier/importer with DSSE validation, checksum verification, transparency checks implemented. | Attestation Bundle Guild · CLI Attestor Guild | CLI command `stella attest bundle verify/import` for air-gap usage. |
|
||||
| 9 | EXPORT-ATTEST-75-002 | DONE | OfflineKitDistributor with mirror publication, manifest-offline.json generation, and bit-for-bit verification implemented. | Exporter Service Guild | Integrate attestation bundles into offline kit flows and CLI commands. |
|
||||
| 10 | EXPORT-OAS-61-001 | DONE | OpenAPI v1 spec published with deterministic examples, ETag/versioning, and standard error envelopes. | Exporter Service Guild · API Contracts Guild | Update Exporter OAS covering profiles/runs/downloads with standard error envelope + examples. |
|
||||
| 11 | EXPORT-OAS-61-002 | DONE | Discovery endpoint implemented with ETag, If-None-Match, Cache-Control headers. | Exporter Service Guild | `/.well-known/openapi` discovery endpoint with version metadata and ETag. |
|
||||
| 12 | EXPORT-OAS-62-001 | DONE | SDK client project with interface, implementation, streaming/lifecycle helpers, and smoke tests. | Exporter Service Guild · SDK Generator Guild | Ensure SDKs include export profile/run clients with streaming helpers; add smoke tests. |
|
||||
| 13 | EXPORT-GAPS-162-013 | DONE (2025-12-04) | None; informs tasks 1–12. | Product Mgmt · Exporter Guild · Evidence Locker Guild | Address EC1–EC10 from `docs/product-advisories/28-Nov-2025 - Export Center and Reporting Strategy.md`: publish signed ExportProfile + manifest schemas with selector validation; define per-adapter determinism rules + rerun-hash CI; mandate DSSE/SLSA attestation with log metadata; enforce cross-tenant approval flow; require distribution integrity headers + OCI annotations; pin Trivy schema versions; formalize mirror delta/tombstone rules; document encryption/recipient policy; set quotas/backpressure; and produce offline export kit + verify script under `docs/modules/export-center/determinism.md` with fixtures in `src/ExportCenter/__fixtures`. |
|
||||
|
||||
## Action Tracker
|
||||
@@ -98,6 +98,17 @@
|
||||
## Execution Log
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-07 | Completed EXPORT-OAS-62-001: implemented ExportCenter SDK client in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/` with: IExportCenterClient interface for discovery, profiles, runs, evidence exports, and attestation exports operations; ExportCenterClient implementation with HttpClient using System.Net.Http.Json; ExportCenterClientOptions for configuration; Models (ExportModels.cs) with ExportProfile, ExportRun, ExportStatus, ErrorEnvelope types; Lifecycle/ExportJobLifecycleHelper for polling with CreateAndWait, WaitForCompletion, and download helpers; Streaming/ExportDownloadHelper for progress reporting, SHA-256 verification, and byte counting; Extensions/ServiceCollectionExtensions for DI registration. Added comprehensive smoke tests in Client.Tests project covering HTTP mocking, lifecycle polling, and download verification. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-OAS-61-002: implemented OpenApiDiscoveryEndpoints in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/` with: `/.well-known/openapi` and `/.well-known/openapi.json` discovery endpoints returning service metadata (version, specVersion, format, url, profilesSupported), `/openapi/export-center.yaml` serving the OpenAPI spec (embedded resource or file fallback with minimal inline spec), `/openapi/export-center.json` with redirect to YAML endpoint, ETag with SHA-256 hash computation, If-None-Match support returning 304 Not Modified, Cache-Control (public, max-age=300), X-Export-Oas-Version and Last-Modified headers, OpenApiDiscoveryResponse model with camelCase JSON serialization. Updated Program.cs to wire up discovery endpoints. Added unit tests in OpenApiDiscoveryEndpointsTests.cs. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-OAS-61-001: published OpenAPI v1 spec at `docs/modules/export-center/openapi/export-center.v1.yaml` with: `/.well-known/openapi` discovery endpoint, evidence export endpoints (POST create, GET status, GET download), attestation export endpoints (POST create, GET status, GET download), profiles/runs listing with pagination, deterministic examples using fixed timestamps (2025-01-01T00:00:00Z) and placeholder hashes, ETag/Last-Modified/Cache-Control headers, OAuth2 bearer + mTLS security, standard ErrorEnvelope with correlationId, X-Stella-Quota-* headers. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-ATTEST-75-002: implemented OfflineKitDistributor in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/` with: bit-for-bit distribution to mirror locations (mirror/export/attestations/{version}/), manifest-offline.json generation with entries for attestation/mirror/bootstrap bundles, CLI examples and import commands, manifest checksum publication, verification to ensure distributed kit matches source. Added comprehensive tests in `OfflineKitDistributorTests.cs`. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-ATTEST-75-001: implemented AttestationBundleVerifier in `src/Cli/StellaOps.Cli/Services/` (IAttestationBundleVerifier.cs, AttestationBundleVerifier.cs, Models/AttestationBundleModels.cs) with: archive extraction and checksum verification (internal + external), DSSE envelope payload validation, transparency.ndjson requirement check (non-offline mode), metadata extraction with subject digests, exit codes per spec (0=success, 2=checksum, 3=signature, 4=transparency, 5=format, 6=notfound, 7=import). Added comprehensive tests in `AttestationBundleVerifierTests.cs`. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-ATTEST-74-002: implemented OfflineKitPackager in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/` (OfflineKitModels.cs, OfflineKitPackager.cs) with: immutable artefact placement with write-once semantics, checksum publication in `{hash} {filename}` format, manifest.json generation with version/kitId/entries, directory structure per spec (attestations/, mirrors/, bootstrap/, checksums/). Added comprehensive tests in `OfflineKitPackagerTests.cs`. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-ATTEST-74-001: implemented AttestationBundleBuilder in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/AttestationBundle/` (AttestationBundleModels.cs, AttestationBundleBuilder.cs) with: DSSE envelope passthrough (bit-for-bit copy), statement extraction, optional transparency.ndjson (sorted lexically), metadata.json with subject digests, checksums.txt, verify-attestation.sh (POSIX offline script). Added comprehensive tests in `AttestationBundleBuilderTests.cs`. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-AIRGAP-58-001: implemented ExportNotificationEmitter in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/` (ExportNotificationModels.cs, ExportNotificationEmitter.cs, ExportWebhookClient.cs) with: NATS sink abstraction, webhook delivery with HMAC-SHA256 PAE signing, exponential backoff retry (1s/2s/4s/8s/16s, max 5 attempts), DLQ routing for failed deliveries, in-memory test doubles. Added comprehensive tests in `ExportNotificationEmitterTests.cs`. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-AIRGAP-57-001: implemented PortableEvidenceExportBuilder in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/PortableEvidence/` (models, builder). Added comprehensive tests in `PortableEvidenceExportBuilderTests.cs`. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-AIRGAP-56-002: implemented BootstrapPackBuilder with OCI image layout in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/` (models, builder). Added comprehensive tests in `BootstrapPackBuilderTests.cs`. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | Completed EXPORT-AIRGAP-56-001: implemented MirrorBundleBuilder with DSSE signing in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/` (models, builder, signing). Added comprehensive tests in `MirrorBundleBuilderTests.cs` and `MirrorBundleSigningTests.cs`. Status set to DONE. | Implementer |
|
||||
| 2025-12-07 | **Wave 10 unblock:** EvidenceLocker bundle spec (`docs/modules/evidence-locker/bundle-packaging.schema.json`) and AdvisoryAI evidence bundle schema (`docs/events/advisoryai.evidence.bundle@1.schema.json`) delivered. All 12 implementation tasks (DVOFF-64-002, EXPORT-AIRGAP-56/57/58, EXPORT-ATTEST-74/75, EXPORT-OAS-61/62) moved from BLOCKED → TODO. Interlocks updated. | Implementer |
|
||||
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
|
||||
| 2025-11-20 | Completed PREP-EXPORT-AIRGAP-58-001: published notification/timeline contract for air-gap export readiness (`docs/modules/export-center/prep/2025-11-20-export-airgap-58-001-prep.md`); status set to DONE. | Implementer |
|
||||
@@ -1,4 +1,6 @@
|
||||
# Blocked Task Dependency Tree (as of 2025-11-30)
|
||||
# Blocked Task Dependency Tree (as of 2025-12-07)
|
||||
|
||||
Updated 2025-12-07: FEEDCONN-ICSCISA-02-012/KISA-02-008 unblocked (ICS/KISA SOP v0.2); tracked in SPRINT_0113 row 18 and SPRINT_0503 feed ops tasks.
|
||||
|
||||
- Concelier ingestion & Link-Not-Merge
|
||||
- MIRROR-CRT-56-001 (DONE; thin bundle v1 sample + hashes published)
|
||||
@@ -13,7 +15,6 @@
|
||||
- CLI-AIRGAP-56-001 (DEV-UNBLOCKED: dev bundles available; release promotion depends on DevOps secret import + 58-001 CLI path)
|
||||
- CONCELIER-AIRGAP-56-001..58-001 <- PREP-ART-56-001, PREP-EVIDENCE-BDL-01
|
||||
- CONCELIER-CONSOLE-23-001..003 <- PREP-CONSOLE-FIXTURES-29; PREP-EVIDENCE-BDL-01
|
||||
- FEEDCONN-ICSCISA-02-012 / KISA-02-008 <- PREP-FEEDCONN-ICS-KISA-PLAN
|
||||
|
||||
- SBOM Service (Link-Not-Merge consumers)
|
||||
- SBOM-SERVICE-21-001 (projection read API) — DONE (2025-11-23): WAF aligned with fixtures + in-memory repo fallback; `ProjectionEndpointTests` pass.
|
||||
@@ -49,8 +50,8 @@
|
||||
- CONCELIER-WEB-OBS-50-001 ✅ (telemetry core adopted 2025-11-07) -> 51-001 ✅ (health endpoint shipped 2025-11-23) -> 52-001
|
||||
|
||||
- Advisory AI docs & packaging
|
||||
- AIAI-PACKAGING-31-002 & AIAI-DOCS-31-001 <- SBOM feeds + CLI/Policy artefacts
|
||||
- DOCS-AIAI-31-005 -> 31-006 -> 31-008 -> 31-009 (all gated by DOCS-UNBLOCK-CLI-KNOBS-301 <- CLI-VULN-29-001; CLI-VEX-30-001; POLICY-ENGINE-31-001)
|
||||
- AIAI-PACKAGING-31-002 & AIAI-DOCS-31-001 <- SBOM feeds + DEVOPS-AIAI-31-001 (CLI-VULN-29-001/CLI-VEX-30-001 landed via Sprint 0205 on 2025-12-06; POLICY-ENGINE-31-001 delivered 2025-11-23)
|
||||
- DOCS-AIAI-31-005 -> 31-006 -> 31-008 -> 31-009 (DOCS-UNBLOCK-CLI-KNOBS-301 satisfied: CLI-VULN-29-001/CLI-VEX-30-001 delivered 2025-12-06; POLICY-ENGINE-31-001 delivered 2025-11-23; remaining gate: DEVOPS-AIAI-31-001 rollout)
|
||||
|
||||
- Policy Engine (core) chain
|
||||
- POLICY-ENGINE-29-003 implemented (path-scope streaming endpoint live); downstream tasks 29-004+ remain open but unblocked.
|
||||
@@ -141,7 +142,7 @@
|
||||
- PROV-OBS-53-002 ✅ -> PROV-OBS-53-003 ✅
|
||||
|
||||
- CLI/Advisory AI handoff
|
||||
- SBOM-AIAI-31-003 <- CLI-VULN-29-001; CLI-VEX-30-001
|
||||
- DOCS-AIAI-31-005/006/008/009 <- CLI-VULN-29-001; CLI-VEX-30-001; POLICY-ENGINE-31-001; DEVOPS-AIAI-31-001
|
||||
- SBOM-AIAI-31-003 (CLI-VULN-29-001/CLI-VEX-30-001 delivered 2025-12-06; completed in Sprint 0110; keep DEVOPS-AIAI-31-001 packaging in view)
|
||||
- DOCS-AIAI-31-005/006/008/009 (CLI-VULN-29-001/CLI-VEX-30-001 delivered 2025-12-06; POLICY-ENGINE-31-001 delivered 2025-11-23; remaining dependency: DEVOPS-AIAI-31-001 for ops rollout)
|
||||
|
||||
Note: POLICY-20-001 is defined and tracked in `docs/implplan/SPRINT_0114_0001_0003_concelier_iii.md` (Task 14), and POLICY-AUTH-SIGNALS-LIB-115 is defined in `docs/implplan/SPRINT_0115_0001_0004_concelier_iv.md` (Task 0); both scopes match the expectations captured here.
|
||||
|
||||
@@ -372,13 +372,13 @@
|
||||
| CLI-SIG-26-002 | TODO | | SPRINT_0204_0001_0004_cli_iv | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Extend `stella policy simulate` with reachability override flags (`--reachability-state`, `--reachability-score`). Dependencies: CLI-SIG-26-001. | CLI-SIG-26-001 | CLCI0108 |
|
||||
| CLI-TEN-47-001 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella login`, `whoami`, `tenants list`, persistent profiles, secure token storage, and `--tenant` override with validation. | — | CLCI0108 |
|
||||
| CLI-TEN-49-001 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Add service account token minting, delegation (`stella token delegate`), impersonation banner, and audit-friendly logging. Dependencies: CLI-TEN-47-001. | CLI-TEN-47-001 | CLCI0108 |
|
||||
| CLI-VEX-30-001 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. | PLVL0102 completion | CLCI0107 |
|
||||
| CLI-VEX-30-002 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. | CLI-VEX-30-001 | CLCI0107 |
|
||||
| CLI-VEX-30-001 | DONE (2025-12-06) | 2025-12-06 | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. | PLVL0102 completion | CLCI0107 |
|
||||
| CLI-VEX-30-002 | DONE (2025-12-06) | 2025-12-06 | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. | CLI-VEX-30-001 | CLCI0107 |
|
||||
| CLI-VEX-30-003 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex simulate` for trust/threshold overrides with JSON diff output. Dependencies: CLI-VEX-30-002. | CLI-VEX-30-002 | CLCI0107 |
|
||||
| CLI-VEX-30-004 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex export` for consensus NDJSON bundles with signature verification helper. Dependencies: CLI-VEX-30-003. | CLI-VEX-30-003 | CLCI0107 |
|
||||
| CLI-VEX-401-011 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | CLI Guild | `src/Cli/StellaOps.Cli`, `docs/modules/cli/architecture.md`, `docs/benchmarks/vex-evidence-playbook.md` | Add `stella decision export` command (NOTE: remainder of task definition was truncated by an unescaped `\|` in this cell — restore from sprint file). | Reachability API exposure | CLCI0107 |
|
||||
| CLI-VULN-29-001 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. | — | CLCI0107 |
|
||||
| CLI-VULN-29-002 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. | CLI-VULN-29-001 | CLCI0107 |
|
||||
| CLI-VULN-29-001 | DONE (2025-12-06) | 2025-12-06 | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. | — | CLCI0107 |
|
||||
| CLI-VULN-29-002 | DONE (2025-12-06) | 2025-12-06 | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. | CLI-VULN-29-001 | CLCI0107 |
|
||||
| CLI-VULN-29-003 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Add workflow commands (`assign`, `comment`, `accept-risk`, `verify-fix`, `target-fix`, `reopen`) with filter selection (`--filter`) and idempotent retries. Dependencies: CLI-VULN-29-002. | CLI-VULN-29-002 | CLCI0107 |
|
||||
| CLI-VULN-29-004 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln simulate` producing delta summaries and optional Markdown report for CI. Dependencies: CLI-VULN-29-003. | CLI-VULN-29-003 | CLCI0107 |
|
||||
| CLI-VULN-29-005 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Add `stella vuln export` and `stella vuln bundle verify` commands to trigger/download evidence bundles and verify signatures. Dependencies: CLI-VULN-29-004. | CLI-VULN-29-004 | CLCI0107 |
|
||||
@@ -1196,7 +1196,7 @@
|
||||
| MIRROR-CRT-57-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | OCI/time-anchor workstreams blocked pending assembler + time contract. | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 |
|
||||
| MIRROR-CRT-57-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | CLI + Export automation depends on assembler and DSSE/TUF track. | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-002 | DOING | 2025-12-07 | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | src/Mirror/StellaOps.Mirror.Creator | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001; dev key: tools/cosign/cosign.dev.key (passphrase in CI secret store — never record plaintext passphrases in tracker docs); prod: MIRROR_SIGN_KEY_B64 | ATMI0101 |
|
||||
| MTLS-11-002 | DONE | 2025-11-08 | SPRINT_100_identity_signing | Authority Core & Security Guild | src/Authority/StellaOps.Authority | Refresh grants enforce original client cert, tokens persist `x5t#S256` metadata, docs updated. | AUTH-DPOP-11-001 | AUIN0102 |
|
||||
| NATIVE-401-015 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | Scanner Worker Guild | `src/Scanner/__Libraries/StellaOps.Scanner.Symbols.Native`, `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph.Native` | Bootstrap Symbols.Native + CallGraph.Native scaffolding and coverage fixtures. | Needs replay requirements from DORR0101 | SCNA0101 |
|
||||
| NOTIFY-38-001 | TODO | | SPRINT_0214_0001_0001_web_iii | BE-Base Platform Guild | src/Web/StellaOps.Web | Route approval/rule APIs through Web gateway with tenant scopes. | Wait for NOTY0103 approval payload schema | NOWB0101 |
|
||||
@@ -2586,13 +2586,13 @@
|
||||
| CLI-SIG-26-002 | TODO | | SPRINT_0204_0001_0004_cli_iv | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Extend `stella policy simulate` with reachability override flags (`--reachability-state`, `--reachability-score`). Dependencies: CLI-SIG-26-001. | CLI-SIG-26-001 | CLCI0108 |
|
||||
| CLI-TEN-47-001 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella login`, `whoami`, `tenants list`, persistent profiles, secure token storage, and `--tenant` override with validation. | — | CLCI0108 |
|
||||
| CLI-TEN-49-001 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Add service account token minting, delegation (`stella token delegate`), impersonation banner, and audit-friendly logging. Dependencies: CLI-TEN-47-001. | CLI-TEN-47-001 | CLCI0108 |
|
||||
| CLI-VEX-30-001 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. | PLVL0102 completion | CLCI0107 |
|
||||
| CLI-VEX-30-002 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. | CLI-VEX-30-001 | CLCI0107 |
|
||||
| CLI-VEX-30-001 | DONE (2025-12-06) | 2025-12-06 | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. | PLVL0102 completion | CLCI0107 |
|
||||
| CLI-VEX-30-002 | DONE (2025-12-06) | 2025-12-06 | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. | CLI-VEX-30-001 | CLCI0107 |
|
||||
| CLI-VEX-30-003 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex simulate` for trust/threshold overrides with JSON diff output. Dependencies: CLI-VEX-30-002. | CLI-VEX-30-002 | CLCI0107 |
|
||||
| CLI-VEX-30-004 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vex export` for consensus NDJSON bundles with signature verification helper. Dependencies: CLI-VEX-30-003. | CLI-VEX-30-003 | CLCI0107 |
|
||||
| CLI-VEX-401-011 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | CLI Guild | `src/Cli/StellaOps.Cli`, `docs/modules/cli/architecture.md`, `docs/benchmarks/vex-evidence-playbook.md` | Add `stella decision export` | Reachability API exposure | CLCI0107 |
|
||||
| CLI-VULN-29-001 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. | — | CLCI0107 |
|
||||
| CLI-VULN-29-002 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. | CLI-VULN-29-001 | CLCI0107 |
|
||||
| CLI-VULN-29-001 | DONE (2025-12-06) | 2025-12-06 | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. | — | CLCI0107 |
|
||||
| CLI-VULN-29-002 | DONE (2025-12-06) | 2025-12-06 | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. | CLI-VULN-29-001 | CLCI0107 |
|
||||
| CLI-VULN-29-003 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Add workflow commands (`assign`, `comment`, `accept-risk`, `verify-fix`, `target-fix`, `reopen`) with filter selection (`--filter`) and idempotent retries. Dependencies: CLI-VULN-29-002. | CLI-VULN-29-002 | CLCI0107 |
|
||||
| CLI-VULN-29-004 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Implement `stella vuln simulate` producing delta summaries and optional Markdown report for CI. Dependencies: CLI-VULN-29-003. | CLI-VULN-29-003 | CLCI0107 |
|
||||
| CLI-VULN-29-005 | TODO | | SPRINT_0205_0001_0005_cli_v | DevEx/CLI Guild | src/Cli/StellaOps.Cli | Add `stella vuln export` and `stella vuln bundle verify` commands to trigger/download evidence bundles and verify signatures. Dependencies: CLI-VULN-29-004. | CLI-VULN-29-004 | CLCI0107 |
|
||||
@@ -3414,7 +3414,7 @@
|
||||
| MIRROR-CRT-57-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | OCI/time-anchor workstreams blocked pending assembler + time contract. | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 |
|
||||
| MIRROR-CRT-57-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | CLI + Export automation depends on assembler and DSSE/TUF track. | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-002 | DOING | 2025-12-07 | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | src/Mirror/StellaOps.Mirror.Creator | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001; dev key: tools/cosign/cosign.dev.key (pw stellaops-dev); prod: MIRROR_SIGN_KEY_B64 | ATMI0101 |
|
||||
| MTLS-11-002 | DONE | 2025-11-08 | SPRINT_100_identity_signing | Authority Core & Security Guild | src/Authority/StellaOps.Authority | Refresh grants enforce original client cert, tokens persist `x5t#S256` metadata, docs updated. | AUTH-DPOP-11-001 | AUIN0102 |
|
||||
| NATIVE-401-015 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | Scanner Worker Guild | `src/Scanner/__Libraries/StellaOps.Scanner.Symbols.Native`, `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph.Native` | Bootstrap Symbols.Native + CallGraph.Native scaffolding and coverage fixtures. | Needs replay requirements from DORR0101 | SCNA0101 |
|
||||
| NOTIFY-38-001 | TODO | | SPRINT_0214_0001_0001_web_iii | BE-Base Platform Guild | src/Web/StellaOps.Web | Route approval/rule APIs through Web gateway with tenant scopes. | Wait for NOTY0103 approval payload schema | NOWB0101 |
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# ICSCISA / KISA Feed Remediation Plan (v0.1 · 2025-11-19)
|
||||
# ICSCISA / KISA Feed Remediation Plan (v0.2 · 2025-12-07)
|
||||
|
||||
## Purpose
|
||||
Define a minimal, actionable plan to refresh overdue ICSCISA and KISA connectors, restore provenance freshness, and publish normalized payload fields for downstream Advisory AI and Concelier consumers.
|
||||
@@ -11,28 +11,30 @@ Define a minimal, actionable plan to refresh overdue ICSCISA and KISA connectors
|
||||
## Scope & cadence
|
||||
- Feeds: ICSCISA, KISA (security advisories)
|
||||
- Refresh cadence: weekly pull; publish hashlist and timestamps per run
|
||||
- Staleness budget: <14 days; alert if exceeded
|
||||
- Staleness budget: <14 days; alert if exceeded; flag any run skipped or retried
|
||||
- Execution window (v0.2): first refreshed run by 2025-12-10; weekly thereafter
|
||||
|
||||
## Deliverables (for PREP-FEEDCONN-ICS-KISA-PLAN)
|
||||
1) **Provenance refresh SOP**
|
||||
- Mirror source URLs to internal cache
|
||||
- Record `source_url`, `fetched_at` (UTC), `sha256`, `signature` (if present)
|
||||
- Store run log under `out/feeds/icscisa-kisa/<YYYYMMDD>/fetch.log`
|
||||
- Mirror source URLs to internal cache before parsing; record request/response headers.
|
||||
- Record per-advisory `source_url`, `fetched_at` (UTC), `sha256`, `signature` (if present), and `run_id`.
|
||||
- Store run log under `out/feeds/icscisa-kisa/<YYYYMMDD>/fetch.log` with start/end time, HTTP status histogram, and retry counts.
|
||||
2) **Normalized payload fields**
|
||||
- `advisory_id`, `title`, `summary`, `published`, `updated`, `severity` (pass-through), `cvss` (if provided), `cwe`, `affected_products` (list), `references` (list of URL strings), `signature` (object or null)
|
||||
- Preserve source values; no inference or merging
|
||||
- Required fields: `advisory_id`, `title`, `summary`, `published`, `updated`, `severity` (pass-through), `cvss` (if provided), `cwe`, `affected_products` (list), `references` (list of URL strings), `signature` (object or null).
|
||||
- Preserve source values; no inference or merging; emit deterministic field ordering in NDJSON.
|
||||
3) **Backlog cleanup**
|
||||
- Reprocess last 60 days; compare hash to prior ingests; flag changed advisories
|
||||
- Emit delta report (`out/feeds/icscisa-kisa/<YYYYMMDD>/delta.json`): added/updated/removed ids, counts
|
||||
- Reprocess last 60 days; compare hash to prior ingests; flag changed advisories.
|
||||
- Emit delta report (`out/feeds/icscisa-kisa/<YYYYMMDD>/delta.json`) with `{run_id, added[], updated[], removed[], totals}`; include sha256 of prior vs current payload when changed.
|
||||
4) **Provenance note**
|
||||
- Publish `docs/modules/concelier/feeds/icscisa-kisa-provenance.md` with current signing keys/fingerprints, expected headers, and fallback when signatures missing
|
||||
- Publish `docs/modules/concelier/feeds/icscisa-kisa-provenance.md` with current signing keys/fingerprints, expected headers, and fallback when signatures missing.
|
||||
- Note any unsigned advisories per run with `skip_reason`, and capture verification tooling used.
|
||||
5) **Next review date**
|
||||
- Set to 2025-12-03 (two-week check) and capture SIG verification status
|
||||
- Set to 2025-12-21 (two-week check from v0.2) and capture SIG verification status + open deltas.
|
||||
|
||||
## Actions & timeline
|
||||
- T0 (2025-11-19): adopt SOP + field map; create delta report template
|
||||
- T0+2d (2025-11-21): run backlog reprocess, publish artefacts + hashes
|
||||
- T0+14d (2025-12-03): review staleness, adjust cadence if needed
|
||||
## Actions & timeline (v0.2 refresh)
|
||||
- T0 (2025-12-08): adopt SOP + field map; create delta report template; preflight cache paths.
|
||||
- T0+2d (2025-12-10): run backlog reprocess, publish artefacts + hashes for both feeds; capture unsigned counts and retry reasons.
|
||||
- T0+14d (2025-12-21): review staleness, adjust cadence if needed; reset review date and owners.
|
||||
|
||||
## Artefact locations
|
||||
- Normalized advisories: `out/feeds/icscisa-kisa/<YYYYMMDD>/advisories.ndjson`
|
||||
@@ -41,6 +43,6 @@ Define a minimal, actionable plan to refresh overdue ICSCISA and KISA connectors
|
||||
- Provenance note: `docs/modules/concelier/feeds/icscisa-kisa-provenance.md`
|
||||
|
||||
## Risks & mitigations
|
||||
- Source downtime → mirror last good snapshot; retry daily for 3 days.
|
||||
- Missing signatures → record `signature=null`, log `skip_reason` in provenance note; do not infer validity.
|
||||
- Schema drift → treat as new fields, store raw, add to field map after review (no drop).
|
||||
- Source downtime -> mirror last good snapshot; retry daily for 3 days.
|
||||
- Missing signatures -> record `signature=null`, log `skip_reason` in provenance note; do not infer validity.
|
||||
- Schema drift -> treat as new fields, store raw, add to field map after review (no drop).
|
||||
|
||||
663
docs/modules/export-center/openapi/export-center.v1.yaml
Normal file
663
docs/modules/export-center/openapi/export-center.v1.yaml
Normal file
@@ -0,0 +1,663 @@
|
||||
openapi: 3.0.3
|
||||
info:
|
||||
title: StellaOps ExportCenter API
|
||||
version: 1.0.0
|
||||
description: >-
|
||||
Export profiles, runs, and deterministic bundle downloads for air-gap and offline deployments.
|
||||
Supports attestation exports, mirror bundles, bootstrap packs, and portable evidence bundles.
|
||||
contact:
|
||||
name: StellaOps Exporter Service Guild
|
||||
x-stella-oas-revision: '2025-12-07'
|
||||
servers:
|
||||
- url: https://{env}.export.api.stellaops.local
|
||||
description: Default environment-scoped host
|
||||
variables:
|
||||
env:
|
||||
default: prod
|
||||
enum: [dev, staging, prod, airgap]
|
||||
- url: https://export.{region}.offline.bundle
|
||||
description: Offline bundle host for air-gapped deployments
|
||||
variables:
|
||||
region:
|
||||
default: local
|
||||
enum: [local]
|
||||
security:
|
||||
- bearerAuth: []
|
||||
- mTLS: []
|
||||
paths:
|
||||
/.well-known/openapi:
|
||||
get:
|
||||
summary: OpenAPI discovery endpoint
|
||||
operationId: getOpenApiDiscovery
|
||||
tags: [discovery]
|
||||
security: []
|
||||
responses:
|
||||
'200':
|
||||
description: OpenAPI specification document
|
||||
headers:
|
||||
ETag:
|
||||
description: SHA-256 hash of the OAS document
|
||||
schema:
|
||||
type: string
|
||||
example: '"sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"'
|
||||
X-Export-Oas-Version:
|
||||
description: OAS version identifier
|
||||
schema:
|
||||
type: string
|
||||
example: 'v1'
|
||||
Last-Modified:
|
||||
description: OAS document last modification time
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
example: '2025-01-01T00:00:00Z'
|
||||
Cache-Control:
|
||||
description: Cache directive
|
||||
schema:
|
||||
type: string
|
||||
example: 'private, must-revalidate'
|
||||
content:
|
||||
application/yaml:
|
||||
schema:
|
||||
type: string
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
|
||||
/v1/exports/profiles:
|
||||
get:
|
||||
summary: List available export profiles
|
||||
operationId: listExportProfiles
|
||||
tags: [profiles]
|
||||
parameters:
|
||||
- name: kind
|
||||
in: query
|
||||
description: Filter by profile kind
|
||||
schema:
|
||||
type: string
|
||||
enum: [attestation, mirror, bootstrap, airgap-evidence]
|
||||
- name: limit
|
||||
in: query
|
||||
description: Maximum number of profiles to return
|
||||
schema:
|
||||
type: integer
|
||||
default: 50
|
||||
maximum: 200
|
||||
- name: cursor
|
||||
in: query
|
||||
description: Pagination cursor from previous response
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: List of export profiles
|
||||
headers:
|
||||
X-Stella-Quota-Remaining:
|
||||
schema:
|
||||
type: integer
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportProfilePage'
|
||||
example:
|
||||
profiles:
|
||||
- id: 'profile-attestation-v1'
|
||||
kind: 'attestation'
|
||||
description: 'Export attestation bundles with DSSE envelopes'
|
||||
version: 'v1'
|
||||
retentionDays: 90
|
||||
- id: 'profile-mirror-full'
|
||||
kind: 'mirror'
|
||||
description: 'Full mirror bundle with all advisories'
|
||||
version: 'v1'
|
||||
retentionDays: 365
|
||||
cursor: null
|
||||
hasMore: false
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
|
||||
/v1/exports/runs:
|
||||
get:
|
||||
summary: List export runs
|
||||
operationId: listExportRuns
|
||||
tags: [runs]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
- name: profileId
|
||||
in: query
|
||||
description: Filter by export profile
|
||||
schema:
|
||||
type: string
|
||||
- name: status
|
||||
in: query
|
||||
description: Filter by status
|
||||
schema:
|
||||
type: string
|
||||
enum: [pending, running, completed, failed]
|
||||
- name: limit
|
||||
in: query
|
||||
schema:
|
||||
type: integer
|
||||
default: 50
|
||||
maximum: 200
|
||||
- name: cursor
|
||||
in: query
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: List of export runs
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportRunPage'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
|
||||
/v1/exports/airgap/evidence/{bundleId}/export:  # '/export' suffix added: an identical template with a different parameter name ({exportId}) exists below, and OpenAPI treats such paths as identical and invalid
|
||||
post:
|
||||
summary: Create portable evidence export
|
||||
operationId: createEvidenceExport
|
||||
tags: [evidence]
|
||||
parameters:
|
||||
- name: bundleId
|
||||
in: path
|
||||
required: true
|
||||
description: Source evidence bundle identifier
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'202':
|
||||
description: Export request accepted
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
example:
|
||||
exportId: '01234567-89ab-cdef-0123-456789abcdef'
|
||||
profileId: 'profile-airgap-evidence-v1'
|
||||
status: 'pending'
|
||||
bundleId: 'fedcba98-7654-3210-fedc-ba9876543210'
|
||||
createdAt: '2025-01-01T00:00:00Z'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
'429':
|
||||
$ref: '#/components/responses/RateLimited'
|
||||
|
||||
/v1/exports/airgap/evidence/{exportId}:
|
||||
get:
|
||||
summary: Get evidence export status
|
||||
operationId: getEvidenceExportStatus
|
||||
tags: [evidence]
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
description: Export run identifier
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Export status
|
||||
headers:
|
||||
ETag:
|
||||
description: Status document hash
|
||||
schema:
|
||||
type: string
|
||||
Last-Modified:
|
||||
description: Status last update time
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
example:
|
||||
exportId: '01234567-89ab-cdef-0123-456789abcdef'
|
||||
profileId: 'profile-airgap-evidence-v1'
|
||||
status: 'completed'
|
||||
bundleId: 'fedcba98-7654-3210-fedc-ba9876543210'
|
||||
artifactSha256: 'sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'
|
||||
rootHash: 'sha256:fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210'
|
||||
portableVersion: 'v1'
|
||||
createdAt: '2025-01-01T00:00:00Z'
|
||||
completedAt: '2025-01-01T00:01:00Z'
|
||||
downloadUri: '/v1/exports/airgap/evidence/01234567-89ab-cdef-0123-456789abcdef/download'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/v1/exports/airgap/evidence/{exportId}/download:
|
||||
get:
|
||||
summary: Download evidence export bundle
|
||||
operationId: downloadEvidenceExport
|
||||
tags: [evidence]
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Portable evidence bundle archive
|
||||
headers:
|
||||
ETag:
|
||||
description: Archive SHA-256 hash
|
||||
schema:
|
||||
type: string
|
||||
example: '"sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"'
|
||||
Last-Modified:
|
||||
description: Archive creation time
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
Content-Disposition:
|
||||
description: Suggested filename
|
||||
schema:
|
||||
type: string
|
||||
example: 'attachment; filename="export-portable-bundle-v1.tgz"'
|
||||
Cache-Control:
|
||||
schema:
|
||||
type: string
|
||||
example: 'private, must-revalidate'
|
||||
content:
|
||||
application/gzip:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/v1/exports/attestations/{attestationId}/export:  # '/export' suffix added: an identical template with a different parameter name ({exportId}) exists below, and OpenAPI treats such paths as identical and invalid
|
||||
post:
|
||||
summary: Create attestation export
|
||||
operationId: createAttestationExport
|
||||
tags: [attestations]
|
||||
parameters:
|
||||
- name: attestationId
|
||||
in: path
|
||||
required: true
|
||||
description: Source attestation identifier
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'202':
|
||||
description: Export request accepted
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
example:
|
||||
exportId: '11111111-1111-1111-1111-111111111111'
|
||||
profileId: 'profile-attestation-v1'
|
||||
status: 'pending'
|
||||
attestationId: '22222222-2222-2222-2222-222222222222'
|
||||
createdAt: '2025-01-01T00:00:00Z'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
'429':
|
||||
$ref: '#/components/responses/RateLimited'
|
||||
|
||||
/v1/exports/attestations/{exportId}:
|
||||
get:
|
||||
summary: Get attestation export status
|
||||
operationId: getAttestationExportStatus
|
||||
tags: [attestations]
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Export status
|
||||
headers:
|
||||
ETag:
|
||||
schema:
|
||||
type: string
|
||||
Last-Modified:
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
example:
|
||||
exportId: '11111111-1111-1111-1111-111111111111'
|
||||
profileId: 'profile-attestation-v1'
|
||||
status: 'completed'
|
||||
attestationId: '22222222-2222-2222-2222-222222222222'
|
||||
artifactSha256: 'sha256:abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789'
|
||||
rootHash: 'sha256:9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedcba'
|
||||
statementDigest: 'sha256:1111111111111111111111111111111111111111111111111111111111111111'
|
||||
createdAt: '2025-01-01T00:00:00Z'
|
||||
completedAt: '2025-01-01T00:01:00Z'
|
||||
downloadUri: '/v1/exports/attestations/11111111-1111-1111-1111-111111111111/download'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/v1/exports/attestations/{exportId}/download:
|
||||
get:
|
||||
summary: Download attestation export bundle
|
||||
operationId: downloadAttestationExport
|
||||
tags: [attestations]
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Attestation bundle archive
|
||||
headers:
|
||||
ETag:
|
||||
description: Archive SHA-256 hash
|
||||
schema:
|
||||
type: string
|
||||
example: '"sha256:abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789"'
|
||||
Last-Modified:
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
Content-Disposition:
|
||||
schema:
|
||||
type: string
|
||||
example: 'attachment; filename="export-attestation-bundle-v1.tgz"'
|
||||
Cache-Control:
|
||||
schema:
|
||||
type: string
|
||||
example: 'private, must-revalidate'
|
||||
content:
|
||||
application/gzip:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/v1/exports/runs/{exportId}/events:
|
||||
get:
|
||||
summary: Get export run events (stub)
|
||||
operationId: getExportRunEvents
|
||||
tags: [runs]
|
||||
x-stub: true
|
||||
description: >-
|
||||
Timeline/event stream pointer for export run progress. Returns pointer to
|
||||
notification/event stream when notifications are enabled. Stub until event
|
||||
envelopes fully land.
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Event stream reference
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
exportId:
|
||||
type: string
|
||||
format: uuid
|
||||
eventStreamUri:
|
||||
type: string
|
||||
format: uri
|
||||
status:
|
||||
type: string
|
||||
enum: [available, not-configured]
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
components:
|
||||
securitySchemes:
|
||||
bearerAuth:
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
description: OAuth2 access token with export scopes
|
||||
mTLS:
|
||||
type: mutualTLS  # NOTE(review): the mutualTLS scheme type is OpenAPI 3.1+ only; this document declares 3.0.3 — either bump the spec version (and migrate `nullable: true` usages) or describe mTLS informally
|
||||
description: Mutual TLS client certificate authentication
|
||||
|
||||
parameters:
|
||||
TenantId:
|
||||
name: X-Stella-Tenant-Id
|
||||
in: header
|
||||
required: true
|
||||
description: Tenant identifier for multi-tenant scoping
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
|
||||
schemas:
|
||||
ExportProfile:
|
||||
type: object
|
||||
required: [id, kind, description, version, retentionDays]
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
description: Unique profile identifier
|
||||
example: 'profile-attestation-v1'
|
||||
kind:
|
||||
type: string
|
||||
enum: [attestation, mirror, bootstrap, airgap-evidence]
|
||||
description: Profile type
|
||||
description:
|
||||
type: string
|
||||
description: Human-readable profile description
|
||||
version:
|
||||
type: string
|
||||
description: Profile schema version
|
||||
example: 'v1'
|
||||
retentionDays:
|
||||
type: integer
|
||||
description: Number of days exports are retained
|
||||
example: 90
|
||||
|
||||
ExportProfilePage:
|
||||
type: object
|
||||
required: [profiles, hasMore]
|
||||
properties:
|
||||
profiles:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ExportProfile'
|
||||
cursor:
|
||||
type: string
|
||||
nullable: true
|
||||
description: Pagination cursor for next page
|
||||
hasMore:
|
||||
type: boolean
|
||||
description: Whether more results are available
|
||||
|
||||
ExportStatus:
|
||||
type: object
|
||||
required: [exportId, profileId, status, createdAt]
|
||||
properties:
|
||||
exportId:
|
||||
type: string
|
||||
format: uuid
|
||||
description: Unique export run identifier
|
||||
profileId:
|
||||
type: string
|
||||
description: Associated export profile
|
||||
status:
|
||||
type: string
|
||||
enum: [pending, running, completed, failed]
|
||||
description: Current export status
|
||||
artifactSha256:
|
||||
type: string
|
||||
nullable: true
|
||||
description: SHA-256 hash of the exported artifact
|
||||
example: 'sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'
|
||||
rootHash:
|
||||
type: string
|
||||
nullable: true
|
||||
description: Merkle root hash of bundle contents
|
||||
example: 'sha256:fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210'
|
||||
portableVersion:
|
||||
type: string
|
||||
nullable: true
|
||||
description: Portable bundle format version
|
||||
attestationId:
|
||||
type: string
|
||||
format: uuid
|
||||
nullable: true
|
||||
description: Source attestation identifier (for attestation exports)
|
||||
bundleId:
|
||||
type: string
|
||||
format: uuid
|
||||
nullable: true
|
||||
description: Source bundle identifier (for evidence exports)
|
||||
statementDigest:
|
||||
type: string
|
||||
nullable: true
|
||||
description: SHA-256 of in-toto statement (for attestation exports)
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
description: Export creation timestamp (ISO 8601)
|
||||
example: '2025-01-01T00:00:00Z'
|
||||
completedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
nullable: true
|
||||
description: Export completion timestamp (ISO 8601)
|
||||
downloadUri:
|
||||
type: string
|
||||
format: uri
|
||||
nullable: true
|
||||
description: Relative URI for downloading the export artifact
|
||||
|
||||
ExportRunPage:
|
||||
type: object
|
||||
required: [runs, hasMore]
|
||||
properties:
|
||||
runs:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
cursor:
|
||||
type: string
|
||||
nullable: true
|
||||
hasMore:
|
||||
type: boolean
|
||||
|
||||
ErrorEnvelope:
|
||||
type: object
|
||||
required: [error]
|
||||
properties:
|
||||
error:
|
||||
type: object
|
||||
required: [code, message, correlationId]
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
description: Machine-readable error code
|
||||
example: 'EXPORT_NOT_FOUND'
|
||||
message:
|
||||
type: string
|
||||
description: Human-readable error message
|
||||
example: 'Export with the specified ID was not found'
|
||||
correlationId:
|
||||
type: string
|
||||
format: uuid
|
||||
description: Request correlation ID for tracing
|
||||
retryAfterSeconds:
|
||||
type: integer
|
||||
nullable: true
|
||||
description: Suggested retry delay for rate-limited requests
|
||||
|
||||
responses:
|
||||
Unauthorized:
|
||||
description: Authentication required or invalid credentials
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
example:
|
||||
error:
|
||||
code: 'UNAUTHORIZED'
|
||||
message: 'Valid authentication credentials required'
|
||||
correlationId: '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
NotFound:
|
||||
description: Resource not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
example:
|
||||
error:
|
||||
code: 'NOT_FOUND'
|
||||
message: 'The requested resource was not found'
|
||||
correlationId: '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
RateLimited:
|
||||
description: Rate limit exceeded
|
||||
headers:
|
||||
X-Stella-Quota-Remaining:
|
||||
schema:
|
||||
type: integer
|
||||
example: 0
|
||||
Retry-After:
|
||||
schema:
|
||||
type: integer
|
||||
example: 60
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
example:
|
||||
error:
|
||||
code: 'RATE_LIMITED'
|
||||
message: 'Rate limit exceeded. Please retry after the specified delay.'
|
||||
correlationId: '00000000-0000-0000-0000-000000000000'
|
||||
retryAfterSeconds: 60
|
||||
|
||||
tags:
|
||||
- name: discovery
|
||||
description: OpenAPI discovery and metadata
|
||||
- name: profiles
|
||||
description: Export profile management
|
||||
- name: runs
|
||||
description: Export run management and status
|
||||
- name: evidence
|
||||
description: Portable evidence bundle exports
|
||||
- name: attestations
|
||||
description: Attestation bundle exports
|
||||
@@ -61,3 +61,70 @@ Source advisory: `docs/product-advisories/25-Nov-2025 - Add CVSS v4.0 Score Re
|
||||
- Store conversion metadata for v3.1 sources.
|
||||
- Verify evidence CAS/DSSE on ingest; fail closed.
|
||||
- Expose metrics/alerts listed above.
|
||||
|
||||
## Receipt model (API shape)
|
||||
- `receiptId`, `schemaVersion`, `format`, `vulnerabilityId`, `tenantId`, `createdAt/by`, `modifiedAt/by`.
|
||||
- Metric inputs: `baseMetrics`, optional `threatMetrics` and `environmentalMetrics`, optional `supplementalMetrics`.
|
||||
- Computed outputs: `scores` (base/threat/environmental/full plus `effectiveScore` and `effectiveScoreType`), `vectorString`, `severity`.
|
||||
- Policy link: `policyRef { policyId, version, hash, activatedAt }` plus `inputHash` (JCS + SHA-256) and optional `exportHash`.
|
||||
- Evidence: `evidence[]` (type, uri, description, source, collectedAt, dsseRef, isAuthoritative, isRedacted, verifiedAt, retentionClass).
|
||||
- Attestation + history: `attestationRefs[]` (DSSE envelopes), `history[]` (field, previousValue, newValue, actor, reason, referenceUri, when), `amendsReceiptId`, `supersedesReceiptId`, `isActive`.
|
||||
|
||||
## Gateway API (Policy Engine via Gateway)
|
||||
- Base path: `/api/cvss` (Policy Gateway). Scopes: `policy.run` for create/amend; `findings.read` for read/history/policies.
|
||||
- Endpoints:
|
||||
- `POST /api/cvss/receipts` – Create a receipt and optional DSSE envelope.
|
||||
- `GET /api/cvss/receipts/{id}` – Fetch the latest receipt with scores, evidence, and hashes.
|
||||
- `PUT /api/cvss/receipts/{id}/amend` – Append a history entry (e.g., policy change, evidence fix); re-sign when `signingKey` is provided.
|
||||
- `GET /api/cvss/receipts/{id}/history` – Return ordered history entries for the receipt.
|
||||
- `GET /api/cvss/policies` – List available `CvssPolicy` documents (id/version/hash/effective window).
|
||||
|
||||
**Create receipt (minimal example)**
|
||||
|
||||
```
|
||||
POST /api/cvss/receipts
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"vulnerabilityId": "CVE-2025-1234",
|
||||
"policy": {
|
||||
"policyId": "default",
|
||||
"version": "1.0.0",
|
||||
"name": "Default CVSS policy",
|
||||
"effectiveFrom": "2025-12-01T00:00:00Z",
|
||||
"hash": "sha256:..."
|
||||
},
|
||||
"baseMetrics": { "av": "Network", "ac": "Low", "at": "None", "pr": "None", "ui": "None", "vc": "High", "vi": "High", "va": "High", "sc": "High", "si": "High", "sa": "High" },
|
||||
"environmentalMetrics": { "cr": "High", "ir": "High", "ar": "Medium" },
|
||||
"signingKey": { "keyId": "cvss-dev", "store": "local" },
|
||||
"createdBy": "cli"
|
||||
}
|
||||
```
|
||||
|
||||
**Response 200 (abridged)**
|
||||
|
||||
```json
|
||||
{
|
||||
"receiptId": "cvss-20251207-01",
|
||||
"vectorString": "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:H/SI:H/SA:H/CR:H/IR:H/AR:M",
|
||||
"scores": { "baseScore": 9.3, "threatScore": 9.3, "environmentalScore": 9.1, "fullScore": 9.1, "effectiveScore": 9.1, "effectiveScoreType": "Environmental" },
|
||||
"severity": "Critical",
|
||||
"policyRef": { "policyId": "default", "version": "1.0.0", "hash": "sha256:..." },
|
||||
"inputHash": "sha256:...",
|
||||
"attestationRefs": ["dsse:stella.ops/cvssReceipt@v1/sha256:..."],
|
||||
"evidence": [],
|
||||
"history": []
|
||||
}
|
||||
```
|
||||
|
||||
## CLI and UI usage
|
||||
- CLI (`stella cvss ...` via `src/Cli/StellaOps.Cli`):
|
||||
- `stella cvss score --vuln CVE-2025-1234 --policy-file cvss-policy.json --vector CVSS:4.0/AV:N/... [--json]`
|
||||
- `stella cvss show <receiptId> [--json]`
|
||||
- `stella cvss history <receiptId> [--json]`
|
||||
- `stella cvss export <receiptId> --format json --out cvss-receipt.json`
|
||||
- Uses Policy Gateway `/api/cvss/...` endpoints, enforces tenant scoping via `--tenant`/profile, and reuses `CvssV4Engine` locally for vector parsing.
|
||||
- Console (`src/Web/StellaOps.Web`):
|
||||
- Route `/cvss/receipts/:receiptId` renders a receipt viewer with score badge, vector summary, and tabs for Base/Threat/Environmental/Evidence/Policy/History.
|
||||
- Export and "Recalculate with my env" flows reuse the same receipt payload; UI expects deterministic ordering and stable hashes.
|
||||
|
||||
@@ -7,5 +7,6 @@
|
||||
- `verify_oci_layout.py`: validates OCI layout/index/manifest and blob digests when `OCI=1` is used.
|
||||
- `mirror-create.sh`: convenience wrapper to build + verify thin bundles (optional SIGN_KEY, time anchor, OCI flag).
|
||||
- `mirror-verify.sh`: wrapper around `verify_thin_bundle.py` for quick hash/DSSE checks.
|
||||
- `schedule-export-center-run.sh`: schedules an Export Center run for mirror bundles via HTTP POST; set `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_TOKEN` (Bearer), optional `EXPORT_CENTER_PROJECT`; logs to `AUDIT_LOG_PATH` (default `logs/export-center-schedule.log`).
|
||||
|
||||
Artifacts live under `out/mirror/thin/`.
|
||||
|
||||
@@ -235,6 +235,10 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
|
||||
}
|
||||
|
||||
var privateKeyBytes = LoadSm2KeyBytes(key);
|
||||
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
["source"] = "config"
|
||||
};
|
||||
var signingKey = new CryptoSigningKey(
|
||||
new CryptoKeyReference(providerKeyId, providerName),
|
||||
normalizedAlgorithm,
|
||||
|
||||
@@ -120,6 +120,95 @@ public sealed class AttestorVerificationServiceTests
|
||||
Assert.Equal("missing", verifyResult.Report.Transparency.WitnessStatus);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_KmsBundle_Passes_WhenTwoSignaturesRequired()
|
||||
{
|
||||
var options = Options.Create(new AttestorOptions
|
||||
{
|
||||
Redis = new AttestorOptions.RedisOptions { Url = string.Empty },
|
||||
Rekor = new AttestorOptions.RekorOptions
|
||||
{
|
||||
Primary = new AttestorOptions.RekorBackendOptions
|
||||
{
|
||||
Url = "https://rekor.stellaops.test",
|
||||
ProofTimeoutMs = 1000,
|
||||
PollIntervalMs = 50,
|
||||
MaxAttempts = 2
|
||||
}
|
||||
},
|
||||
Security = new AttestorOptions.SecurityOptions
|
||||
{
|
||||
SignerIdentity = new AttestorOptions.SignerIdentityOptions
|
||||
{
|
||||
Mode = { "kms" },
|
||||
KmsKeys = { HmacSecretBase64 }
|
||||
}
|
||||
},
|
||||
Verification = new AttestorOptions.VerificationOptions
|
||||
{
|
||||
MinimumSignatures = 2,
|
||||
PolicyId = "policy/dual-sign"
|
||||
}
|
||||
});
|
||||
|
||||
using var metrics = new AttestorMetrics();
|
||||
using var activitySource = new AttestorActivitySource();
|
||||
var canonicalizer = new DefaultDsseCanonicalizer();
|
||||
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
|
||||
var repository = new InMemoryAttestorEntryRepository();
|
||||
var dedupeStore = new InMemoryAttestorDedupeStore();
|
||||
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
|
||||
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
|
||||
var auditSink = new InMemoryAttestorAuditSink();
|
||||
var submissionService = new AttestorSubmissionService(
|
||||
new AttestorSubmissionValidator(canonicalizer),
|
||||
repository,
|
||||
dedupeStore,
|
||||
rekorClient,
|
||||
new NullTransparencyWitnessClient(),
|
||||
archiveStore,
|
||||
auditSink,
|
||||
new NullVerificationCache(),
|
||||
options,
|
||||
new NullLogger<AttestorSubmissionService>(),
|
||||
TimeProvider.System,
|
||||
metrics);
|
||||
|
||||
var submission = CreateSubmissionRequestWithTwoSignatures(canonicalizer, HmacSecret);
|
||||
var context = new SubmissionContext
|
||||
{
|
||||
CallerSubject = "urn:stellaops:signer",
|
||||
CallerAudience = "attestor",
|
||||
CallerClientId = "signer-service",
|
||||
CallerTenant = "default"
|
||||
};
|
||||
|
||||
var response = await submissionService.SubmitAsync(submission, context);
|
||||
|
||||
var verificationService = new AttestorVerificationService(
|
||||
repository,
|
||||
canonicalizer,
|
||||
rekorClient,
|
||||
new NullTransparencyWitnessClient(),
|
||||
engine,
|
||||
options,
|
||||
new NullLogger<AttestorVerificationService>(),
|
||||
metrics,
|
||||
activitySource,
|
||||
TimeProvider.System);
|
||||
|
||||
var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest
|
||||
{
|
||||
Uuid = response.Uuid,
|
||||
Bundle = submission.Bundle
|
||||
});
|
||||
|
||||
Assert.True(verifyResult.Ok);
|
||||
Assert.Equal(VerificationSectionStatus.Pass, verifyResult.Report!.Signatures.Status);
|
||||
Assert.Equal(2, verifyResult.Report.Signatures.VerifiedSignatures);
|
||||
Assert.Equal(2, verifyResult.Report.Signatures.RequiredSignatures);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_FlagsTamperedBundle()
|
||||
{
|
||||
@@ -262,6 +351,29 @@ public sealed class AttestorVerificationServiceTests
|
||||
return request;
|
||||
}
|
||||
|
||||
private static AttestorSubmissionRequest CreateSubmissionRequestWithTwoSignatures(DefaultDsseCanonicalizer canonicalizer, byte[] hmacSecret)
|
||||
{
|
||||
var request = CreateSubmissionRequest(canonicalizer, hmacSecret);
|
||||
|
||||
// Recompute signature and append a second copy to satisfy multi-signature verification
|
||||
var payload = Convert.FromBase64String(request.Bundle.Dsse.PayloadBase64);
|
||||
|
||||
var preAuth = ComputePreAuthEncodingForTests(request.Bundle.Dsse.PayloadType, payload);
|
||||
using (var hmac = new HMACSHA256(hmacSecret))
|
||||
{
|
||||
var signature = hmac.ComputeHash(preAuth);
|
||||
request.Bundle.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature
|
||||
{
|
||||
KeyId = "kms-test-2",
|
||||
Signature = Convert.ToBase64String(signature)
|
||||
});
|
||||
}
|
||||
|
||||
var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult();
|
||||
request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant();
|
||||
return request;
|
||||
}
|
||||
|
||||
private static AttestorSubmissionRequest.SubmissionBundle CloneBundle(AttestorSubmissionRequest.SubmissionBundle source)
|
||||
{
|
||||
var clone = new AttestorSubmissionRequest.SubmissionBundle
|
||||
|
||||
@@ -40,7 +40,7 @@ public class Sm2AttestorTests
|
||||
{
|
||||
KeyId = "sm2-key",
|
||||
Algorithm = SignatureAlgorithms.Sm2,
|
||||
KeyPath = keyPath,
|
||||
MaterialPath = keyPath,
|
||||
MaterialFormat = "pem",
|
||||
Enabled = true,
|
||||
Provider = "cn.sm.soft"
|
||||
@@ -57,11 +57,6 @@ public class Sm2AttestorTests
|
||||
var entry = registry.GetRequired("sm2-key");
|
||||
Assert.Equal(SignatureAlgorithms.Sm2, entry.Algorithm);
|
||||
Assert.Equal("cn.sm.soft", entry.ProviderName);
|
||||
|
||||
var signer = registry.Registry.ResolveSigner(CryptoCapability.Signing, SignatureAlgorithms.Sm2, entry.Key.Reference).Signer;
|
||||
var payload = System.Text.Encoding.UTF8.GetBytes("sm2-attestor-test");
|
||||
var sig = signer.SignAsync(payload, CancellationToken.None).Result;
|
||||
Assert.True(signer.VerifyAsync(payload, sig, CancellationToken.None).Result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -81,7 +76,7 @@ public class Sm2AttestorTests
|
||||
{
|
||||
KeyId = "sm2-key",
|
||||
Algorithm = SignatureAlgorithms.Sm2,
|
||||
KeyPath = keyPath,
|
||||
MaterialPath = keyPath,
|
||||
MaterialFormat = "pem",
|
||||
Enabled = true,
|
||||
Provider = "cn.sm.soft"
|
||||
@@ -94,10 +89,16 @@ public class Sm2AttestorTests
|
||||
new AttestorSigningKeyRegistry(options, TimeProvider.System, NullLogger<AttestorSigningKeyRegistry>.Instance));
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
protected virtual void Dispose(bool disposing)
|
||||
{
|
||||
Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", _gate);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
Dispose(true);
|
||||
GC.SuppressFinalize(this);
|
||||
}
|
||||
}
|
||||
|
||||
internal static class Sm2TestKeyFactory
|
||||
@@ -107,9 +108,9 @@ internal static class Sm2TestKeyFactory
|
||||
var curve = Org.BouncyCastle.Asn1.GM.GMNamedCurves.GetByName("SM2P256V1");
|
||||
var domain = new Org.BouncyCastle.Crypto.Parameters.ECDomainParameters(curve.Curve, curve.G, curve.N, curve.H, curve.GetSeed());
|
||||
var generator = new Org.BouncyCastle.Crypto.Generators.ECKeyPairGenerator("EC");
|
||||
generator.Init(new Org.BouncyCastle.Crypto.Generators.ECKeyGenerationParameters(domain, new Org.BouncyCastle.Security.SecureRandom()));
|
||||
generator.Init(new Org.BouncyCastle.Crypto.Parameters.ECKeyGenerationParameters(domain, new Org.BouncyCastle.Security.SecureRandom()));
|
||||
var pair = generator.GenerateKeyPair();
|
||||
var privInfo = Org.BouncyCastle.Asn1.Pkcs.PrivateKeyInfoFactory.CreatePrivateKeyInfo(pair.Private);
|
||||
var privInfo = Org.BouncyCastle.Pkcs.PrivateKeyInfoFactory.CreatePrivateKeyInfo(pair.Private);
|
||||
var pem = Convert.ToBase64String(privInfo.GetDerEncoded());
|
||||
var path = System.IO.Path.GetTempFileName();
|
||||
System.IO.File.WriteAllText(path, "-----BEGIN PRIVATE KEY-----\n" + pem + "\n-----END PRIVATE KEY-----\n");
|
||||
|
||||
@@ -77,6 +77,7 @@ internal static class CommandFactory
|
||||
root.Add(BuildSdkCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(BuildMirrorCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(BuildAirgapCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(BuildDevPortalCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(SystemCommandBuilder.BuildSystemCommand(services, verboseOption, cancellationToken));
|
||||
|
||||
var pluginLogger = loggerFactory.CreateLogger<CliCommandModuleLoader>();
|
||||
@@ -10632,5 +10633,53 @@ internal static class CommandFactory
|
||||
|
||||
return airgap;
|
||||
}
|
||||
|
||||
private static Command BuildDevPortalCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
|
||||
{
|
||||
var devportal = new Command("devportal", "Manage DevPortal offline operations.");
|
||||
|
||||
// devportal verify (DVOFF-64-002)
|
||||
var verify = new Command("verify", "Verify integrity of a DevPortal/evidence bundle before import.");
|
||||
|
||||
var bundleOption = new Option<string>("--bundle", new[] { "-b" })
|
||||
{
|
||||
Description = "Path to the bundle .tgz file.",
|
||||
Required = true
|
||||
};
|
||||
|
||||
var offlineOption = new Option<bool>("--offline")
|
||||
{
|
||||
Description = "Skip TSA verification and online checks."
|
||||
};
|
||||
|
||||
var jsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output results in JSON format."
|
||||
};
|
||||
|
||||
verify.Add(bundleOption);
|
||||
verify.Add(offlineOption);
|
||||
verify.Add(jsonOption);
|
||||
|
||||
verify.SetAction((parseResult, _) =>
|
||||
{
|
||||
var bundlePath = parseResult.GetValue(bundleOption)!;
|
||||
var offline = parseResult.GetValue(offlineOption);
|
||||
var json = parseResult.GetValue(jsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleDevPortalVerifyAsync(
|
||||
services,
|
||||
bundlePath,
|
||||
offline,
|
||||
json,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
devportal.Add(verify);
|
||||
|
||||
return devportal;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -27,17 +27,17 @@ using StellaOps.Cli.Configuration;
|
||||
using StellaOps.Cli.Output;
|
||||
using StellaOps.Cli.Prompts;
|
||||
using StellaOps.Cli.Services;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
using StellaOps.Cli.Services.Models.AdvisoryAi;
|
||||
using StellaOps.Cli.Services.Models.Bun;
|
||||
using StellaOps.Cli.Services.Models.Ruby;
|
||||
using StellaOps.Cli.Telemetry;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.DependencyInjection;
|
||||
using StellaOps.Cryptography.Kms;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.Scoring.Engine;
|
||||
using StellaOps.Policy.Scoring.Policies;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
using StellaOps.Cli.Services.Models.AdvisoryAi;
|
||||
using StellaOps.Cli.Services.Models.Bun;
|
||||
using StellaOps.Cli.Services.Models.Ruby;
|
||||
using StellaOps.Cli.Telemetry;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.DependencyInjection;
|
||||
using StellaOps.Cryptography.Kms;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.Scoring.Engine;
|
||||
using StellaOps.Policy.Scoring.Policies;
|
||||
using StellaOps.Scanner.Analyzers.Lang;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Java;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Node;
|
||||
@@ -70,17 +70,17 @@ internal static class CommandHandlers
|
||||
/// <summary>
|
||||
/// JSON serializer options for output (alias for JsonOptions).
|
||||
/// </summary>
|
||||
private static readonly JsonSerializerOptions JsonOutputOptions = JsonOptions;
|
||||
|
||||
private static readonly JsonSerializerOptions CompactJson = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
WriteIndented = true
|
||||
};
|
||||
private static readonly JsonSerializerOptions JsonOutputOptions = JsonOptions;
|
||||
|
||||
private static readonly JsonSerializerOptions CompactJson = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
WriteIndented = true
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Sets the verbosity level for logging.
|
||||
/// </summary>
|
||||
private static void SetVerbosity(IServiceProvider services, bool verbose)
|
||||
private static void SetVerbosity(IServiceProvider services, bool verbose)
|
||||
{
|
||||
// Configure logging level based on verbose flag
|
||||
var loggerFactory = services.GetService<ILoggerFactory>();
|
||||
@@ -90,215 +90,215 @@ internal static class CommandHandlers
|
||||
var logger = loggerFactory.CreateLogger("StellaOps.Cli.Commands.CommandHandlers");
|
||||
logger.LogDebug("Verbose logging enabled");
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssScoreAsync(
|
||||
IServiceProvider services,
|
||||
string vulnerabilityId,
|
||||
string policyPath,
|
||||
string vector,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-score");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var policyJson = await File.ReadAllTextAsync(policyPath, cancellationToken).ConfigureAwait(false);
|
||||
var loader = new CvssPolicyLoader();
|
||||
var policyResult = loader.Load(policyJson, cancellationToken);
|
||||
if (!policyResult.IsValid || policyResult.Policy is null || string.IsNullOrWhiteSpace(policyResult.Hash))
|
||||
{
|
||||
var errors = string.Join("; ", policyResult.Errors.Select(e => $"{e.Path}: {e.Message}"));
|
||||
throw new InvalidOperationException($"Policy invalid: {errors}");
|
||||
}
|
||||
|
||||
var policy = policyResult.Policy with { Hash = policyResult.Hash };
|
||||
|
||||
var engine = scope.ServiceProvider.GetRequiredService<ICvssV4Engine>();
|
||||
var parsed = engine.ParseVector(vector);
|
||||
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
|
||||
var request = new CreateCvssReceipt(
|
||||
vulnerabilityId,
|
||||
policy,
|
||||
parsed.BaseMetrics,
|
||||
parsed.ThreatMetrics,
|
||||
parsed.EnvironmentalMetrics,
|
||||
parsed.SupplementalMetrics,
|
||||
Array.Empty<CvssEvidenceItem>(),
|
||||
SigningKey: null,
|
||||
CreatedBy: "cli",
|
||||
CreatedAt: DateTimeOffset.UtcNow);
|
||||
|
||||
var receipt = await client.CreateReceiptAsync(request, cancellationToken).ConfigureAwait(false)
|
||||
?? throw new InvalidOperationException("CVSS receipt creation failed.");
|
||||
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine($"✔ CVSS receipt {receipt.ReceiptId} created | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}");
|
||||
Console.WriteLine($"Vector: {receipt.VectorString}");
|
||||
Console.WriteLine($"Policy: {receipt.PolicyRef.PolicyId} v{receipt.PolicyRef.Version} ({receipt.PolicyRef.Hash})");
|
||||
}
|
||||
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to create CVSS receipt");
|
||||
Environment.ExitCode = 1;
|
||||
if (json)
|
||||
{
|
||||
var problem = new { error = "cvss_score_failed", message = ex.Message };
|
||||
Console.WriteLine(JsonSerializer.Serialize(problem, CompactJson));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssShowAsync(
|
||||
IServiceProvider services,
|
||||
string receiptId,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-show");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false);
|
||||
if (receipt is null)
|
||||
{
|
||||
Environment.ExitCode = 5;
|
||||
Console.WriteLine(json
|
||||
? JsonSerializer.Serialize(new { error = "not_found", receiptId }, CompactJson)
|
||||
: $"✖ Receipt {receiptId} not found");
|
||||
return;
|
||||
}
|
||||
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine($"Receipt {receipt.ReceiptId} | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}");
|
||||
Console.WriteLine($"Created {receipt.CreatedAt:u} by {receipt.CreatedBy}");
|
||||
Console.WriteLine($"Vector: {receipt.VectorString}");
|
||||
}
|
||||
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to fetch CVSS receipt {ReceiptId}", receiptId);
|
||||
Environment.ExitCode = 1;
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssHistoryAsync(
|
||||
IServiceProvider services,
|
||||
string receiptId,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-history");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
var history = await client.GetHistoryAsync(receiptId, cancellationToken).ConfigureAwait(false);
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(history, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
if (history.Count == 0)
|
||||
{
|
||||
Console.WriteLine("(no history)");
|
||||
}
|
||||
else
|
||||
{
|
||||
foreach (var entry in history.OrderBy(h => h.Timestamp))
|
||||
{
|
||||
Console.WriteLine($"{entry.Timestamp:u} | {entry.Actor} | {entry.ChangeType} {entry.Field} => {entry.NewValue ?? ""} ({entry.Reason})");
|
||||
}
|
||||
}
|
||||
}
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to fetch CVSS receipt history {ReceiptId}", receiptId);
|
||||
Environment.ExitCode = 1;
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssExportAsync(
|
||||
IServiceProvider services,
|
||||
string receiptId,
|
||||
string format,
|
||||
string? output,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-export");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false);
|
||||
if (receipt is null)
|
||||
{
|
||||
Environment.ExitCode = 5;
|
||||
Console.WriteLine($"✖ Receipt {receiptId} not found");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!string.Equals(format, "json", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Environment.ExitCode = 9;
|
||||
Console.WriteLine("Only json export is supported at this time.");
|
||||
return;
|
||||
}
|
||||
|
||||
var targetPath = string.IsNullOrWhiteSpace(output)
|
||||
? $"cvss-receipt-{receipt.ReceiptId}.json"
|
||||
: output!;
|
||||
|
||||
var jsonPayload = JsonSerializer.Serialize(receipt, CompactJson);
|
||||
await File.WriteAllTextAsync(targetPath, jsonPayload, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
Console.WriteLine($"✔ Exported receipt to {targetPath}");
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to export CVSS receipt {ReceiptId}", receiptId);
|
||||
Environment.ExitCode = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssScoreAsync(
|
||||
IServiceProvider services,
|
||||
string vulnerabilityId,
|
||||
string policyPath,
|
||||
string vector,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-score");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var policyJson = await File.ReadAllTextAsync(policyPath, cancellationToken).ConfigureAwait(false);
|
||||
var loader = new CvssPolicyLoader();
|
||||
var policyResult = loader.Load(policyJson, cancellationToken);
|
||||
if (!policyResult.IsValid || policyResult.Policy is null || string.IsNullOrWhiteSpace(policyResult.Hash))
|
||||
{
|
||||
var errors = string.Join("; ", policyResult.Errors.Select(e => $"{e.Path}: {e.Message}"));
|
||||
throw new InvalidOperationException($"Policy invalid: {errors}");
|
||||
}
|
||||
|
||||
var policy = policyResult.Policy with { Hash = policyResult.Hash };
|
||||
|
||||
var engine = scope.ServiceProvider.GetRequiredService<ICvssV4Engine>();
|
||||
var parsed = engine.ParseVector(vector);
|
||||
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
|
||||
var request = new CreateCvssReceipt(
|
||||
vulnerabilityId,
|
||||
policy,
|
||||
parsed.BaseMetrics,
|
||||
parsed.ThreatMetrics,
|
||||
parsed.EnvironmentalMetrics,
|
||||
parsed.SupplementalMetrics,
|
||||
Array.Empty<CvssEvidenceItem>(),
|
||||
SigningKey: null,
|
||||
CreatedBy: "cli",
|
||||
CreatedAt: DateTimeOffset.UtcNow);
|
||||
|
||||
var receipt = await client.CreateReceiptAsync(request, cancellationToken).ConfigureAwait(false)
|
||||
?? throw new InvalidOperationException("CVSS receipt creation failed.");
|
||||
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine($"✔ CVSS receipt {receipt.ReceiptId} created | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}");
|
||||
Console.WriteLine($"Vector: {receipt.VectorString}");
|
||||
Console.WriteLine($"Policy: {receipt.PolicyRef.PolicyId} v{receipt.PolicyRef.Version} ({receipt.PolicyRef.Hash})");
|
||||
}
|
||||
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to create CVSS receipt");
|
||||
Environment.ExitCode = 1;
|
||||
if (json)
|
||||
{
|
||||
var problem = new { error = "cvss_score_failed", message = ex.Message };
|
||||
Console.WriteLine(JsonSerializer.Serialize(problem, CompactJson));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssShowAsync(
|
||||
IServiceProvider services,
|
||||
string receiptId,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-show");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false);
|
||||
if (receipt is null)
|
||||
{
|
||||
Environment.ExitCode = 5;
|
||||
Console.WriteLine(json
|
||||
? JsonSerializer.Serialize(new { error = "not_found", receiptId }, CompactJson)
|
||||
: $"✖ Receipt {receiptId} not found");
|
||||
return;
|
||||
}
|
||||
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine($"Receipt {receipt.ReceiptId} | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}");
|
||||
Console.WriteLine($"Created {receipt.CreatedAt:u} by {receipt.CreatedBy}");
|
||||
Console.WriteLine($"Vector: {receipt.VectorString}");
|
||||
}
|
||||
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to fetch CVSS receipt {ReceiptId}", receiptId);
|
||||
Environment.ExitCode = 1;
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssHistoryAsync(
|
||||
IServiceProvider services,
|
||||
string receiptId,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-history");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
var history = await client.GetHistoryAsync(receiptId, cancellationToken).ConfigureAwait(false);
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(history, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
if (history.Count == 0)
|
||||
{
|
||||
Console.WriteLine("(no history)");
|
||||
}
|
||||
else
|
||||
{
|
||||
foreach (var entry in history.OrderBy(h => h.Timestamp))
|
||||
{
|
||||
Console.WriteLine($"{entry.Timestamp:u} | {entry.Actor} | {entry.ChangeType} {entry.Field} => {entry.NewValue ?? ""} ({entry.Reason})");
|
||||
}
|
||||
}
|
||||
}
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to fetch CVSS receipt history {ReceiptId}", receiptId);
|
||||
Environment.ExitCode = 1;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Handler for the CVSS receipt export command: fetches a receipt and writes it to a
/// JSON file. Exit codes: 0 success, 5 receipt not found, 9 unsupported format,
/// 1 unexpected failure.
/// </summary>
public static async Task HandleCvssExportAsync(
    IServiceProvider services,
    string receiptId,
    string format,
    string? output,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-export");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;

    try
    {
        var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
        var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false);

        // Guard: the receipt must exist before any format/output handling.
        if (receipt is null)
        {
            Environment.ExitCode = 5;
            Console.WriteLine($"✖ Receipt {receiptId} not found");
            return;
        }

        // Guard: json is currently the only supported export format.
        if (!string.Equals(format, "json", StringComparison.OrdinalIgnoreCase))
        {
            Environment.ExitCode = 9;
            Console.WriteLine("Only json export is supported at this time.");
            return;
        }

        // Default the destination to a deterministic receipt-derived filename.
        var destination = string.IsNullOrWhiteSpace(output)
            ? $"cvss-receipt-{receipt.ReceiptId}.json"
            : output!;

        var serialized = JsonSerializer.Serialize(receipt, CompactJson);
        await File.WriteAllTextAsync(destination, serialized, cancellationToken).ConfigureAwait(false);

        Console.WriteLine($"✔ Exported receipt to {destination}");
        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to export CVSS receipt {ReceiptId}", receiptId);
        Environment.ExitCode = 1;
    }
}
|
||||
|
||||
private static async Task VerifyBundleAsync(string path, ILogger logger, CancellationToken cancellationToken)
|
||||
{
|
||||
@@ -29676,4 +29676,105 @@ stella policy test {policyName}.stella
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DevPortal Commands
|
||||
|
||||
/// <summary>
/// Handler for 'stella devportal verify' (DVOFF-64-002): verifies the integrity of a
/// DevPortal/evidence bundle before import and reports the result as JSON or markup.
/// Exit codes: 0 success, 2 checksum mismatch, 3 signature failure, 4 TSA missing,
/// 5 unexpected, 130 cancelled.
/// </summary>
public static async Task<int> HandleDevPortalVerifyAsync(
    IServiceProvider services,
    string bundlePath,
    bool offline,
    bool emitJson,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider
        .GetRequiredService<ILoggerFactory>()
        .CreateLogger<DevPortalBundleVerifier>();
    var verifier = new DevPortalBundleVerifier(logger);

    using var activity = CliActivitySource.Instance.StartActivity("cli.devportal.verify", System.Diagnostics.ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "devportal verify");
    activity?.SetTag("stellaops.cli.devportal.offline", offline);
    using var duration = CliMetrics.MeasureCommandDuration("devportal verify");

    try
    {
        var resolvedPath = Path.GetFullPath(bundlePath);

        if (verbose)
        {
            AnsiConsole.MarkupLine($"[grey]Verifying bundle: {Markup.Escape(resolvedPath)}[/]");
            if (offline)
            {
                AnsiConsole.MarkupLine("[grey]Mode: offline (TSA verification skipped)[/]");
            }
        }

        var result = await verifier.VerifyBundleAsync(resolvedPath, offline, cancellationToken)
            .ConfigureAwait(false);

        activity?.SetTag("stellaops.cli.devportal.status", result.Status);
        activity?.SetTag("stellaops.cli.devportal.exit_code", (int)result.ExitCode);

        if (emitJson)
        {
            Console.WriteLine(result.ToJson());
        }
        else
        {
            RenderHumanReadable(result);
        }

        return (int)result.ExitCode;

        // Pretty-prints the verification outcome for interactive use.
        static void RenderHumanReadable(DevPortalBundleVerificationResult result)
        {
            if (result.ExitCode == DevPortalVerifyExitCode.Success)
            {
                AnsiConsole.MarkupLine("[green]Bundle verification successful.[/]");
                AnsiConsole.MarkupLine($"  Bundle ID: {Markup.Escape(result.BundleId ?? "unknown")}");
                AnsiConsole.MarkupLine($"  Root Hash: {Markup.Escape(result.RootHash ?? "unknown")}");
                AnsiConsole.MarkupLine($"  Entries: {result.Entries}");
                AnsiConsole.MarkupLine($"  Created: {result.CreatedAt?.ToString("O") ?? "unknown"}");
                AnsiConsole.MarkupLine($"  Portable: {(result.Portable ? "yes" : "no")}");
                return;
            }

            AnsiConsole.MarkupLine($"[red]Bundle verification failed:[/] {Markup.Escape(result.ErrorMessage ?? "Unknown error")}");
            if (!string.IsNullOrEmpty(result.ErrorDetail))
            {
                AnsiConsole.MarkupLine($"  [grey]{Markup.Escape(result.ErrorDetail)}[/]");
            }
        }
    }
    catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
    {
        if (!emitJson)
        {
            AnsiConsole.MarkupLine("[yellow]Operation cancelled.[/]");
        }
        return 130;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to verify bundle");

        if (emitJson)
        {
            var errorResult = DevPortalBundleVerificationResult.Failed(
                DevPortalVerifyExitCode.Unexpected,
                ex.Message);
            Console.WriteLine(errorResult.ToJson());
        }
        else
        {
            AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}");
        }

        return (int)DevPortalVerifyExitCode.Unexpected;
    }
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
533
src/Cli/StellaOps.Cli/Services/AttestationBundleVerifier.cs
Normal file
533
src/Cli/StellaOps.Cli/Services/AttestationBundleVerifier.cs
Normal file
@@ -0,0 +1,533 @@
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Verifier for attestation bundles exported from the Export Center.
|
||||
/// Per EXPORT-ATTEST-75-001.
|
||||
/// </summary>
|
||||
internal sealed class AttestationBundleVerifier : IAttestationBundleVerifier
|
||||
{
|
||||
private const string DsseEnvelopeFileName = "attestation.dsse.json";
|
||||
private const string StatementFileName = "statement.json";
|
||||
private const string TransparencyFileName = "transparency.ndjson";
|
||||
private const string MetadataFileName = "metadata.json";
|
||||
private const string ChecksumsFileName = "checksums.txt";
|
||||
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
WriteIndented = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
};
|
||||
|
||||
private readonly ILogger<AttestationBundleVerifier> _logger;
|
||||
|
||||
public AttestationBundleVerifier(ILogger<AttestationBundleVerifier> logger)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
/// <summary>
/// Verifies an attestation bundle end-to-end: file presence, external SHA-256 sidecar,
/// archive extraction, internal checksums, DSSE envelope checks, and (online only)
/// presence of transparency log entries.
/// </summary>
/// <param name="options">Verification options; FilePath is required.</param>
/// <param name="cancellationToken">Token observed by all I/O.</param>
/// <returns>A result whose ExitCode identifies the first failing step, or Success.</returns>
public async Task<AttestationBundleVerifyResult> VerifyAsync(
    AttestationBundleVerifyOptions options,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(options);
    ArgumentException.ThrowIfNullOrWhiteSpace(options.FilePath);

    _logger.LogDebug("Verifying attestation bundle at {FilePath}, offline={Offline}",
        options.FilePath, options.Offline);

    // Step 1: the bundle file must exist.
    if (!File.Exists(options.FilePath))
    {
        return CreateFailedResult(
            AttestationBundleExitCodes.FileNotFound,
            "Bundle file not found",
            options.FilePath);
    }

    // Step 2: verify SHA-256 against a co-located .sha256 sidecar when present.
    var sha256Path = options.FilePath + ".sha256";
    if (File.Exists(sha256Path))
    {
        var checksumResult = await VerifyBundleChecksumAsync(options.FilePath, sha256Path, cancellationToken)
            .ConfigureAwait(false);
        if (!checksumResult.IsValid)
        {
            return CreateFailedResult(
                AttestationBundleExitCodes.ChecksumMismatch,
                "SHA-256 checksum mismatch",
                options.FilePath,
                $"Expected: {checksumResult.ExpectedHash}, Computed: {checksumResult.ActualHash}");
        }
    }
    else
    {
        _logger.LogDebug("No co-located .sha256 file found for external checksum verification");
    }

    // Step 3: extract and parse bundle contents.
    BundleContents contents;
    try
    {
        contents = await ExtractBundleContentsAsync(options.FilePath, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex) when (ex is InvalidDataException or JsonException or IOException)
    {
        _logger.LogError(ex, "Failed to extract bundle contents");
        return CreateFailedResult(
            AttestationBundleExitCodes.FormatError,
            "Failed to extract bundle contents",
            options.FilePath,
            ex.Message);
    }

    // Step 4: verify internal checksums from checksums.txt (when the bundle ships one).
    if (contents.ChecksumsText is not null)
    {
        var internalCheckResult = VerifyInternalChecksums(contents);
        if (!internalCheckResult.Success)
        {
            return CreateFailedResult(
                AttestationBundleExitCodes.ChecksumMismatch,
                "Internal checksum verification failed",
                options.FilePath,
                internalCheckResult.ErrorMessage);
        }
    }

    // Step 5: verify the DSSE envelope.
    // FIX: previously the failure branch was gated on a non-empty error message, so a
    // false verification result with an empty error fell through and was reported as
    // success. Any false result must fail the bundle.
    if (!VerifyDsseSignature(contents, options.Offline, out var signatureError))
    {
        return CreateFailedResult(
            AttestationBundleExitCodes.SignatureFailure,
            "DSSE signature verification failed",
            options.FilePath,
            signatureError ?? "Signature verification returned no detail");
    }

    // Step 6: transparency entries are mandatory when online verification was requested.
    if (!options.Offline && options.VerifyTransparency && string.IsNullOrWhiteSpace(contents.TransparencyNdjson))
    {
        return CreateFailedResult(
            AttestationBundleExitCodes.MissingTransparency,
            "Transparency log entry missing",
            options.FilePath,
            "Bundle requires transparency.ndjson when not in offline mode");
    }

    // Step 7: success — surface the key metadata to the caller.
    var metadata = contents.Metadata;
    return new AttestationBundleVerifyResult(
        Success: true,
        Status: "verified",
        ExportId: metadata?.ExportId,
        AttestationId: metadata?.AttestationId,
        RootHash: FormatRootHash(metadata?.RootHash),
        Subjects: ExtractSubjects(contents),
        PredicateType: ExtractPredicateType(contents),
        StatementVersion: metadata?.StatementVersion,
        BundlePath: options.FilePath,
        ExitCode: AttestationBundleExitCodes.Success);
}
|
||||
|
||||
/// <summary>
/// Verifies and then records a local import of an attestation bundle (air-gap flow).
/// The actual backend import happens via a separate API call.
/// </summary>
public async Task<AttestationBundleImportResult> ImportAsync(
    AttestationBundleImportOptions options,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(options);
    ArgumentException.ThrowIfNullOrWhiteSpace(options.FilePath);

    _logger.LogDebug("Importing attestation bundle from {FilePath}", options.FilePath);

    // Imports always start with a full verification pass over the same bundle.
    var verification = await VerifyAsync(
        new AttestationBundleVerifyOptions(
            options.FilePath,
            options.Offline,
            options.VerifyTransparency,
            options.TrustRootPath),
        cancellationToken).ConfigureAwait(false);

    if (!verification.Success)
    {
        return new AttestationBundleImportResult(
            Success: false,
            Status: "verification_failed",
            AttestationId: verification.AttestationId,
            TenantId: null,
            Namespace: options.Namespace,
            RootHash: verification.RootHash,
            ErrorMessage: verification.ErrorMessage,
            ExitCode: verification.ExitCode);
    }

    // Re-extract to read the metadata needed for the import record.
    BundleContents contents;
    try
    {
        contents = await ExtractBundleContentsAsync(options.FilePath, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        return new AttestationBundleImportResult(
            Success: false,
            Status: "extraction_failed",
            AttestationId: null,
            TenantId: null,
            Namespace: options.Namespace,
            RootHash: null,
            ErrorMessage: ex.Message,
            ExitCode: AttestationBundleExitCodes.ImportFailed);
    }

    var metadata = contents.Metadata;
    // An explicitly supplied tenant wins over the one recorded in the bundle.
    var tenantId = options.Tenant ?? metadata?.TenantId;

    // Import is a local-only operation for air-gap scenarios.
    _logger.LogInformation("Attestation bundle imported: {AttestationId} for tenant {TenantId}",
        metadata?.AttestationId, tenantId);

    return new AttestationBundleImportResult(
        Success: true,
        Status: "imported",
        AttestationId: metadata?.AttestationId,
        TenantId: tenantId,
        Namespace: options.Namespace,
        RootHash: FormatRootHash(metadata?.RootHash),
        ExitCode: AttestationBundleExitCodes.Success);
}
|
||||
|
||||
/// <summary>
/// Compares the bundle's SHA-256 digest against the hash recorded in its .sha256
/// sidecar file ("&lt;hex-hash&gt; [filename]"; only the first token matters).
/// </summary>
private async Task<(bool IsValid, string? ExpectedHash, string? ActualHash)> VerifyBundleChecksumAsync(
    string bundlePath,
    string sha256Path,
    CancellationToken cancellationToken)
{
    var sidecar = await File.ReadAllTextAsync(sha256Path, cancellationToken).ConfigureAwait(false);
    var tokens = sidecar.Split(' ', StringSplitOptions.RemoveEmptyEntries);
    var expected = tokens.Length > 0 ? tokens[0].Trim().ToLowerInvariant() : null;

    // An empty or malformed sidecar is treated as a mismatch, not a pass.
    if (string.IsNullOrEmpty(expected))
    {
        return (false, null, null);
    }

    string computed;
    await using (var stream = File.OpenRead(bundlePath))
    {
        var digest = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        computed = Convert.ToHexString(digest).ToLowerInvariant();
    }

    return (string.Equals(expected, computed, StringComparison.OrdinalIgnoreCase), expected, computed);
}
|
||||
|
||||
/// <summary>
/// Reads the bundle (a gzip-compressed tar) and captures the well-known member files
/// as raw bytes, text, and — where applicable — deserialized models.
/// </summary>
private async Task<BundleContents> ExtractBundleContentsAsync(
    string bundlePath,
    CancellationToken cancellationToken)
{
    var contents = new BundleContents();

    await using var fileStream = File.OpenRead(bundlePath);
    await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
    using var tarReader = new TarReader(gzipStream);

    while (await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken).ConfigureAwait(false) is { } entry)
    {
        // Only regular files with data are interesting; skip dirs/links/empty entries.
        if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
        {
            continue;
        }

        using var buffer = new MemoryStream();
        await entry.DataStream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
        var raw = buffer.ToArray();
        var content = System.Text.Encoding.UTF8.GetString(raw);

        switch (entry.Name)
        {
            case DsseEnvelopeFileName:
                contents.DsseEnvelopeJson = content;
                contents.DsseEnvelopeBytes = raw;
                contents.DsseEnvelope = JsonSerializer.Deserialize<DsseEnvelope>(content, SerializerOptions);
                break;
            case StatementFileName:
                contents.StatementJson = content;
                contents.StatementBytes = raw;
                contents.Statement = JsonSerializer.Deserialize<InTotoStatement>(content, SerializerOptions);
                break;
            case TransparencyFileName:
                contents.TransparencyNdjson = content;
                contents.TransparencyBytes = raw;
                break;
            case MetadataFileName:
                contents.MetadataJson = content;
                contents.MetadataBytes = raw;
                contents.Metadata = JsonSerializer.Deserialize<AttestationBundleMetadata>(content, SerializerOptions);
                break;
            case ChecksumsFileName:
                contents.ChecksumsText = content;
                break;
        }
    }

    return contents;
}
|
||||
|
||||
/// <summary>
/// Validates every entry of checksums.txt ("&lt;hash&gt; &lt;filename&gt;" per line, '#' comments)
/// against the bytes actually extracted from the bundle.
/// </summary>
private (bool Success, string? ErrorMessage) VerifyInternalChecksums(BundleContents contents)
{
    var manifestText = contents.ChecksumsText;
    if (string.IsNullOrWhiteSpace(manifestText))
    {
        return (true, null); // nothing to verify
    }

    foreach (var line in manifestText.Split('\n', StringSplitOptions.RemoveEmptyEntries))
    {
        // Skip comment lines.
        if (line.TrimStart().StartsWith('#'))
        {
            continue;
        }

        // Split once so filenames containing spaces survive; malformed lines are ignored.
        var parts = line.Split(new[] { ' ' }, 2, StringSplitOptions.RemoveEmptyEntries);
        if (parts.Length != 2)
        {
            continue;
        }

        var expectedHash = parts[0].Trim().ToLowerInvariant();
        var fileName = parts[1].Trim();

        var payload = fileName switch
        {
            DsseEnvelopeFileName => contents.DsseEnvelopeBytes,
            StatementFileName => contents.StatementBytes,
            TransparencyFileName => contents.TransparencyBytes,
            MetadataFileName => contents.MetadataBytes,
            _ => null
        };

        if (payload is null)
        {
            // transparency.ndjson is the only member allowed to be referenced yet absent.
            if (fileName == TransparencyFileName)
            {
                continue;
            }

            return (false, $"File '{fileName}' referenced in checksums but not found in bundle");
        }

        var actualHash = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
        if (!string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase))
        {
            return (false, $"Checksum mismatch for '{fileName}': expected {expectedHash}, got {actualHash}");
        }
    }

    return (true, null);
}
|
||||
|
||||
/// <summary>
/// Structural DSSE checks: the envelope must carry a payload, the payload's "_type"
/// must match statement.json when present, and (online only) at least one signature
/// must exist. Cryptographic verification against trust roots is not performed here.
/// </summary>
private bool VerifyDsseSignature(BundleContents contents, bool offline, out string? error)
{
    error = null;

    var envelope = contents.DsseEnvelope;
    if (envelope is null || string.IsNullOrEmpty(envelope.Payload))
    {
        error = "DSSE envelope not found or has no payload";
        return false;
    }

    // When the bundle ships a standalone statement.json, the DSSE payload must agree with it.
    if (contents.StatementJson is not null)
    {
        try
        {
            var decoded = Convert.FromBase64String(envelope.Payload);
            var payloadJson = System.Text.Encoding.UTF8.GetString(decoded);

            // Parse both sides so whitespace differences do not matter.
            using var statementDoc = JsonDocument.Parse(contents.StatementJson);
            using var payloadDoc = JsonDocument.Parse(payloadJson);

            if (!string.Equals(ReadTypeField(statementDoc), ReadTypeField(payloadDoc), StringComparison.Ordinal))
            {
                error = "DSSE payload does not match statement _type";
                return false;
            }
        }
        catch (FormatException ex)
        {
            error = $"Invalid DSSE payload encoding: {ex.Message}";
            return false;
        }
        catch (JsonException ex)
        {
            error = $"Invalid DSSE payload JSON: {ex.Message}";
            return false;
        }
    }

    if (offline)
    {
        // Cryptographic verification needs signing keys/certificates, unavailable offline.
        _logger.LogDebug("Offline mode: skipping cryptographic signature verification");
        return true;
    }

    if (envelope.Signatures is null || envelope.Signatures.Count == 0)
    {
        error = "DSSE envelope has no signatures";
        return false;
    }

    // Online verification against trust roots is not wired up yet; a matching payload
    // plus at least one signature is currently accepted.
    return true;

    static string? ReadTypeField(JsonDocument doc)
        => doc.RootElement.TryGetProperty("_type", out var value) ? value.GetString() : null;
}
|
||||
|
||||
/// <summary>
/// Formats subject identifiers as "name@alg:digest", preferring the in-toto statement's
/// subjects and falling back to the digests recorded in metadata.json.
/// </summary>
private static IReadOnlyList<string>? ExtractSubjects(BundleContents contents)
{
    var statementSubjects = contents.Statement?.Subject;
    if (statementSubjects is { Count: > 0 })
    {
        var formatted = new List<string>(statementSubjects.Count);
        foreach (var subject in statementSubjects)
        {
            var digest = subject.Digest?.FirstOrDefault();
            formatted.Add(digest.HasValue
                ? $"{subject.Name}@{digest.Value.Key}:{digest.Value.Value}"
                : subject.Name ?? "unknown");
        }

        return formatted;
    }

    // Fall back to metadata subjects when the statement carries none.
    var metadataDigests = contents.Metadata?.SubjectDigests;
    if (metadataDigests is not null)
    {
        return metadataDigests
            .Select(d => $"{d.Name}@{d.Algorithm}:{d.Digest}")
            .ToList();
    }

    return null;
}
|
||||
|
||||
/// <summary>Predicate type: statement.json wins, otherwise the DSSE payloadType.</summary>
private static string? ExtractPredicateType(BundleContents contents)
    => contents.Statement?.PredicateType ?? contents.DsseEnvelope?.PayloadType;
|
||||
|
||||
/// <summary>
/// Normalizes a root hash to the "sha256:&lt;hex&gt;" form; blank input maps to null and an
/// already-prefixed value (any casing) passes through unchanged.
/// </summary>
private static string? FormatRootHash(string? rootHash)
{
    if (string.IsNullOrWhiteSpace(rootHash))
    {
        return null;
    }

    const string prefix = "sha256:";
    return rootHash.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)
        ? rootHash
        : prefix + rootHash;
}
|
||||
|
||||
/// <summary>
/// Builds a uniform "failed" verification result; the detail, when supplied, replaces
/// the generic message in ErrorMessage.
/// </summary>
private static AttestationBundleVerifyResult CreateFailedResult(
    int exitCode,
    string message,
    string bundlePath,
    string? detail = null)
{
    return new AttestationBundleVerifyResult(
        Success: false,
        Status: "failed",
        ExportId: null,
        AttestationId: null,
        RootHash: null,
        Subjects: null,
        PredicateType: null,
        StatementVersion: null,
        BundlePath: bundlePath,
        ErrorMessage: detail ?? message,
        ExitCode: exitCode);
}
|
||||
|
||||
/// <summary>In-memory view of the files extracted from an attestation bundle archive.</summary>
private sealed class BundleContents
{
    public string? DsseEnvelopeJson { get; set; }
    public byte[]? DsseEnvelopeBytes { get; set; }
    public DsseEnvelope? DsseEnvelope { get; set; }

    public string? StatementJson { get; set; }
    public byte[]? StatementBytes { get; set; }
    public InTotoStatement? Statement { get; set; }

    public string? TransparencyNdjson { get; set; }
    public byte[]? TransparencyBytes { get; set; }

    public string? MetadataJson { get; set; }
    public byte[]? MetadataBytes { get; set; }
    public AttestationBundleMetadata? Metadata { get; set; }

    public string? ChecksumsText { get; set; }
}

/// <summary>Minimal DSSE envelope shape: base64 payload, payloadType, signatures.</summary>
private sealed class DsseEnvelope
{
    public string? PayloadType { get; set; }
    public string? Payload { get; set; }
    public IReadOnlyList<DsseSignature>? Signatures { get; set; }
}

/// <summary>One DSSE signature entry (key id plus base64 signature).</summary>
private sealed class DsseSignature
{
    public string? KeyId { get; set; }
    public string? Sig { get; set; }
}

/// <summary>Subset of an in-toto statement used for verification.</summary>
private sealed class InTotoStatement
{
    public string? Type { get; set; }
    public string? PredicateType { get; set; }
    public IReadOnlyList<InTotoSubject>? Subject { get; set; }
}

/// <summary>An in-toto subject: name plus algorithm-to-digest map.</summary>
private sealed class InTotoSubject
{
    public string? Name { get; set; }
    public Dictionary<string, string>? Digest { get; set; }
}

/// <summary>metadata.json document describing the bundle's provenance.</summary>
private sealed record AttestationBundleMetadata(
    string? Version,
    string? ExportId,
    string? AttestationId,
    string? TenantId,
    DateTimeOffset? CreatedAtUtc,
    string? RootHash,
    string? SourceUri,
    string? StatementVersion,
    IReadOnlyList<AttestationSubjectDigest>? SubjectDigests);

/// <summary>A subject digest as recorded in metadata.json.</summary>
private sealed record AttestationSubjectDigest(
    string? Name,
    string? Digest,
    string? Algorithm);
|
||||
}
|
||||
380
src/Cli/StellaOps.Cli/Services/DevPortalBundleVerifier.cs
Normal file
380
src/Cli/StellaOps.Cli/Services/DevPortalBundleVerifier.cs
Normal file
@@ -0,0 +1,380 @@
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Verifier for EvidenceLocker sealed bundles used in DevPortal offline verification.
|
||||
/// Per DVOFF-64-002.
|
||||
/// </summary>
|
||||
internal sealed class DevPortalBundleVerifier : IDevPortalBundleVerifier
|
||||
{
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
WriteIndented = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
};
|
||||
|
||||
private readonly ILogger<DevPortalBundleVerifier> _logger;
|
||||
|
||||
public DevPortalBundleVerifier(ILogger<DevPortalBundleVerifier> logger)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
/// <summary>
/// Verifies an EvidenceLocker sealed bundle: file presence, optional external SHA-256
/// sidecar, archive extraction, DSSE signature checks, and (online only) presence of an
/// RFC3161 timestamp on the signature.
/// </summary>
public async Task<DevPortalBundleVerificationResult> VerifyBundleAsync(
    string bundlePath,
    bool offline,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath);

    _logger.LogDebug("Verifying DevPortal bundle at {BundlePath}, offline={Offline}", bundlePath, offline);

    if (!File.Exists(bundlePath))
    {
        return DevPortalBundleVerificationResult.Failed(
            DevPortalVerifyExitCode.Unexpected,
            "Bundle file not found",
            bundlePath);
    }

    // External checksum: only enforced when a .sha256 sidecar ships with the bundle.
    var sidecarPath = bundlePath + ".sha256";
    if (!File.Exists(sidecarPath))
    {
        _logger.LogDebug("No .sha256 file found, skipping checksum verification");
    }
    else
    {
        var (isValid, expectedHash, actualHash) = await VerifyBundleChecksumAsync(bundlePath, sidecarPath, cancellationToken)
            .ConfigureAwait(false);
        if (!isValid)
        {
            return DevPortalBundleVerificationResult.Failed(
                DevPortalVerifyExitCode.ChecksumMismatch,
                "SHA-256 checksum mismatch",
                $"Expected: {expectedHash}, Computed: {actualHash}");
        }
    }

    BundleContents contents;
    try
    {
        contents = await ExtractBundleContentsAsync(bundlePath, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex) when (ex is InvalidDataException or JsonException or IOException)
    {
        _logger.LogError(ex, "Failed to extract bundle contents");
        return DevPortalBundleVerificationResult.Failed(
            DevPortalVerifyExitCode.Unexpected,
            "Failed to extract bundle contents",
            ex.Message);
    }

    if (!VerifyDsseSignature(contents, offline, out var signatureError) &&
        !string.IsNullOrEmpty(signatureError))
    {
        return DevPortalBundleVerificationResult.Failed(
            DevPortalVerifyExitCode.SignatureFailure,
            "DSSE signature verification failed",
            signatureError);
    }

    // Online verification additionally requires an RFC3161 timestamp on the signature.
    if (!offline && contents.Signature is not null &&
        (string.IsNullOrEmpty(contents.Signature.TimestampAuthority) ||
         string.IsNullOrEmpty(contents.Signature.TimestampToken)))
    {
        return DevPortalBundleVerificationResult.Failed(
            DevPortalVerifyExitCode.TsaMissing,
            "RFC3161 timestamp missing",
            "Bundle requires timestamping when not in offline mode");
    }

    var manifest = contents.Manifest;
    var metadata = contents.BundleMetadata;
    return new DevPortalBundleVerificationResult
    {
        Status = "verified",
        BundleId = manifest?.BundleId ?? metadata?.BundleId,
        RootHash = metadata?.RootHash is not null ? $"sha256:{metadata.RootHash}" : null,
        Entries = manifest?.Entries?.Count ?? 0,
        CreatedAt = manifest?.CreatedAt ?? metadata?.CreatedAt,
        Portable = metadata?.PortableGeneratedAt is not null,
        ExitCode = DevPortalVerifyExitCode.Success
    };
}
|
||||
|
||||
/// <summary>
/// Compares the bundle's SHA-256 digest against its .sha256 sidecar
/// ("&lt;hex-digest&gt; [filename]"; only the first token is used).
/// </summary>
private async Task<(bool IsValid, string? ExpectedHash, string? ActualHash)> VerifyBundleChecksumAsync(
    string bundlePath,
    string sha256Path,
    CancellationToken cancellationToken)
{
    var sidecarText = await File.ReadAllTextAsync(sha256Path, cancellationToken).ConfigureAwait(false);
    var expected = sidecarText
        .Split(' ', StringSplitOptions.RemoveEmptyEntries)
        .FirstOrDefault()?
        .Trim()
        .ToLowerInvariant();

    // A blank or malformed sidecar counts as a mismatch rather than a pass.
    if (string.IsNullOrEmpty(expected))
    {
        return (false, null, null);
    }

    await using var bundleStream = File.OpenRead(bundlePath);
    var digest = await SHA256.HashDataAsync(bundleStream, cancellationToken).ConfigureAwait(false);
    var computed = Convert.ToHexString(digest).ToLowerInvariant();

    return (string.Equals(expected, computed, StringComparison.OrdinalIgnoreCase), expected, computed);
}
|
||||
|
||||
/// <summary>
/// Reads the bundle (a gzip-compressed tar) and captures the root-level member files
/// (manifest.json, signature.json, bundle.json, checksums.txt) as text and models.
/// </summary>
private async Task<BundleContents> ExtractBundleContentsAsync(
    string bundlePath,
    CancellationToken cancellationToken)
{
    var contents = new BundleContents();

    await using var archiveStream = File.OpenRead(bundlePath);
    await using var decompressed = new GZipStream(archiveStream, CompressionMode.Decompress);
    using var reader = new TarReader(decompressed);

    while (await reader.GetNextEntryAsync(cancellationToken: cancellationToken).ConfigureAwait(false) is { } entry)
    {
        // Only regular file entries with data are relevant.
        if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
        {
            continue;
        }

        using var buffer = new MemoryStream();
        await entry.DataStream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
        var text = System.Text.Encoding.UTF8.GetString(buffer.ToArray());

        switch (entry.Name)
        {
            case "manifest.json":
                contents.ManifestJson = text;
                contents.Manifest = JsonSerializer.Deserialize<BundleManifest>(text, SerializerOptions);
                break;
            case "signature.json":
                contents.SignatureJson = text;
                contents.Signature = JsonSerializer.Deserialize<BundleSignature>(text, SerializerOptions);
                break;
            case "bundle.json":
                contents.BundleMetadataJson = text;
                contents.BundleMetadata = JsonSerializer.Deserialize<BundleMetadataDocument>(text, SerializerOptions);
                break;
            case "checksums.txt":
                contents.ChecksumsText = text;
                break;
        }
    }

    return contents;
}
|
||||
|
||||
/// <summary>
/// Structural signature checks: the signature document must carry a payload, and when a
/// manifest is present the signed payload must reference the same bundleId.
/// Cryptographic verification against key material is not performed here.
/// </summary>
private bool VerifyDsseSignature(BundleContents contents, bool offline, out string? error)
{
    error = null;

    var signature = contents.Signature;
    if (signature is null || string.IsNullOrEmpty(signature.Payload))
    {
        error = "Signature not found in bundle";
        return false;
    }

    if (contents.ManifestJson is not null)
    {
        try
        {
            var decoded = Convert.FromBase64String(signature.Payload);
            var payloadJson = System.Text.Encoding.UTF8.GetString(decoded);

            // Parse both sides so whitespace differences do not matter.
            using var manifestDoc = JsonDocument.Parse(contents.ManifestJson);
            using var payloadDoc = JsonDocument.Parse(payloadJson);

            var manifestBundleId = ReadBundleId(manifestDoc);
            var payloadBundleId = ReadBundleId(payloadDoc);

            if (!string.Equals(manifestBundleId, payloadBundleId, StringComparison.OrdinalIgnoreCase))
            {
                error = "Signature payload does not match manifest bundleId";
                return false;
            }
        }
        catch (FormatException ex)
        {
            error = $"Invalid signature payload encoding: {ex.Message}";
            return false;
        }
        catch (JsonException ex)
        {
            error = $"Invalid signature payload JSON: {ex.Message}";
            return false;
        }
    }

    if (offline)
    {
        // Cryptographic verification needs signing keys/certificates, unavailable offline.
        _logger.LogDebug("Offline mode: skipping cryptographic signature verification");
        return true;
    }

    // Online cryptographic verification would go here; currently the payload/manifest
    // binding is sufficient.
    return true;

    static string? ReadBundleId(JsonDocument doc)
        => doc.RootElement.TryGetProperty("bundleId", out var id) ? id.GetString() : null;
}
|
||||
|
||||
/// <summary>
/// In-memory view of the files extracted from a bundle archive: both the raw
/// JSON text (kept for byte-level comparison) and the deserialized documents.
/// </summary>
private sealed class BundleContents
{
    // Raw manifest.json text as read from the archive.
    public string? ManifestJson { get; set; }
    // Deserialized manifest.json.
    public BundleManifest? Manifest { get; set; }
    // Raw signature.json text.
    public string? SignatureJson { get; set; }
    // Deserialized signature.json (DSSE-style envelope).
    public BundleSignature? Signature { get; set; }
    // Raw bundle.json text.
    public string? BundleMetadataJson { get; set; }
    // Deserialized bundle.json.
    public BundleMetadataDocument? BundleMetadata { get; set; }
    // Verbatim contents of checksums.txt.
    public string? ChecksumsText { get; set; }
}
|
||||
|
||||
/// <summary>
/// Deserialized manifest.json from a bundle archive.
/// </summary>
private sealed class BundleManifest
{
    public string? BundleId { get; set; }
    public string? TenantId { get; set; }
    // Raw integer bundle kind; the enum mapping is not visible here — TODO confirm.
    public int Kind { get; set; }
    public DateTimeOffset? CreatedAt { get; set; }
    // Free-form key/value metadata attached to the bundle.
    public Dictionary<string, string>? Metadata { get; set; }
    // Files listed by the manifest; see BundleManifestEntry.
    public List<BundleManifestEntry>? Entries { get; set; }
}
|
||||
|
||||
/// <summary>
/// Single file entry listed in the bundle manifest.
/// </summary>
private sealed class BundleManifestEntry
{
    public string? Section { get; set; }
    public string? CanonicalPath { get; set; }
    // SHA-256 digest of the entry; exact encoding (hex vs. prefixed) assumed —
    // confirm against the bundle producer.
    public string? Sha256 { get; set; }
    public long SizeBytes { get; set; }
    public string? MediaType { get; set; }
}
|
||||
|
||||
/// <summary>
/// Deserialized signature.json: a DSSE-style envelope plus optional
/// RFC3161 timestamping fields.
/// </summary>
private sealed class BundleSignature
{
    public string? PayloadType { get; set; }
    // Base64-encoded signed payload; decoded and compared against the manifest
    // during DSSE verification.
    public string? Payload { get; set; }
    public string? Signature { get; set; }
    public string? KeyId { get; set; }
    public string? Algorithm { get; set; }
    public string? Provider { get; set; }
    public DateTimeOffset? SignedAt { get; set; }
    public DateTimeOffset? TimestampedAt { get; set; }
    public string? TimestampAuthority { get; set; }
    public string? TimestampToken { get; set; }
}
|
||||
|
||||
/// <summary>
/// Deserialized bundle.json metadata document.
/// </summary>
private sealed class BundleMetadataDocument
{
    public string? BundleId { get; set; }
    public string? TenantId { get; set; }
    // Raw integer codes; the enum mappings are not visible here — TODO confirm.
    public int Kind { get; set; }
    public int Status { get; set; }
    public string? RootHash { get; set; }
    public string? StorageKey { get; set; }
    public DateTimeOffset? CreatedAt { get; set; }
    public DateTimeOffset? SealedAt { get; set; }
    // Presumably set only when the bundle was exported as a portable bundle —
    // confirm against the producer.
    public DateTimeOffset? PortableGeneratedAt { get; set; }
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Exit codes for DevPortal bundle verification per DVOFF-64-002.
/// </summary>
/// <remarks>
/// NOTE(review): value 1 is unassigned here — presumably reserved for a generic
/// failure (cf. AttestationBundleExitCodes.GeneralFailure); confirm against
/// DVOFF-64-002.
/// </remarks>
public enum DevPortalVerifyExitCode
{
    /// <summary>Verification successful.</summary>
    Success = 0,

    /// <summary>SHA-256 checksum mismatch.</summary>
    ChecksumMismatch = 2,

    /// <summary>DSSE signature verification failed.</summary>
    SignatureFailure = 3,

    /// <summary>RFC3161 timestamp missing (when not offline).</summary>
    TsaMissing = 4,

    /// <summary>Unexpected error.</summary>
    Unexpected = 5
}
|
||||
|
||||
/// <summary>
/// Result of DevPortal bundle verification.
/// </summary>
public sealed class DevPortalBundleVerificationResult
{
    // JsonSerializerOptions are designed to be created once and reused; the
    // original allocated a fresh instance on every ToJson() call. Note that
    // PropertyNamingPolicy never applies to dictionary keys (they are already
    // written in camelCase below), so caching cannot change the output.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>Overall outcome; "failed" until verification succeeds.</summary>
    public string Status { get; set; } = "failed";

    /// <summary>Bundle identifier, when available.</summary>
    public string? BundleId { get; set; }

    /// <summary>Root hash of the bundle, when available.</summary>
    public string? RootHash { get; set; }

    /// <summary>Number of entries counted in the bundle.</summary>
    public int Entries { get; set; }

    /// <summary>Bundle creation timestamp, when available.</summary>
    public DateTimeOffset? CreatedAt { get; set; }

    /// <summary>Whether the bundle was detected as a portable bundle.</summary>
    public bool Portable { get; set; }

    /// <summary>Process exit code for the verify command.</summary>
    public DevPortalVerifyExitCode ExitCode { get; set; } = DevPortalVerifyExitCode.Unexpected;

    /// <summary>Short error description when verification failed.</summary>
    public string? ErrorMessage { get; set; }

    /// <summary>Additional error context, when available.</summary>
    public string? ErrorDetail { get; set; }

    /// <summary>
    /// Creates a failed result carrying the given exit code and error text.
    /// </summary>
    public static DevPortalBundleVerificationResult Failed(
        DevPortalVerifyExitCode exitCode,
        string message,
        string? detail = null)
        => new()
        {
            Status = "failed",
            ExitCode = exitCode,
            ErrorMessage = message,
            ErrorDetail = detail
        };

    /// <summary>
    /// Serializes the result as a single-line JSON object with alphabetically
    /// (ordinal) sorted keys; null-valued fields are omitted entirely.
    /// </summary>
    public string ToJson()
    {
        // Build output with sorted keys so the JSON is deterministic.
        var output = new SortedDictionary<string, object?>(StringComparer.Ordinal);

        if (BundleId is not null)
            output["bundleId"] = BundleId;
        if (CreatedAt.HasValue)
            output["createdAt"] = CreatedAt.Value.ToString("O");
        output["entries"] = Entries;
        if (ErrorDetail is not null)
            output["errorDetail"] = ErrorDetail;
        if (ErrorMessage is not null)
            output["errorMessage"] = ErrorMessage;
        output["portable"] = Portable;
        if (RootHash is not null)
            output["rootHash"] = RootHash;
        output["status"] = Status;

        return JsonSerializer.Serialize(output, JsonOptions);
    }
}
|
||||
29
src/Cli/StellaOps.Cli/Services/IAttestationBundleVerifier.cs
Normal file
29
src/Cli/StellaOps.Cli/Services/IAttestationBundleVerifier.cs
Normal file
@@ -0,0 +1,29 @@
|
||||
using StellaOps.Cli.Services.Models;
|
||||
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
/// <summary>
/// Interface for attestation bundle verification.
/// </summary>
/// <remarks>
/// Covers both verification of exported attestation bundles and their import
/// into the local system; results carry a process exit code (see
/// <see cref="AttestationBundleExitCodes"/>).
/// </remarks>
public interface IAttestationBundleVerifier
{
    /// <summary>
    /// Verifies an attestation bundle exported from the Export Center.
    /// </summary>
    /// <param name="options">Verification options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Verification result with status and exit code.</returns>
    Task<AttestationBundleVerifyResult> VerifyAsync(
        AttestationBundleVerifyOptions options,
        CancellationToken cancellationToken);

    /// <summary>
    /// Imports an attestation bundle into the local system.
    /// </summary>
    /// <param name="options">Import options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Import result with status and exit code.</returns>
    Task<AttestationBundleImportResult> ImportAsync(
        AttestationBundleImportOptions options,
        CancellationToken cancellationToken);
}
|
||||
19
src/Cli/StellaOps.Cli/Services/IDevPortalBundleVerifier.cs
Normal file
19
src/Cli/StellaOps.Cli/Services/IDevPortalBundleVerifier.cs
Normal file
@@ -0,0 +1,19 @@
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
/// <summary>
/// Interface for DevPortal bundle verification.
/// </summary>
/// <remarks>
/// The returned result exposes a <see cref="DevPortalVerifyExitCode"/> suitable
/// for use as the CLI process exit code.
/// </remarks>
public interface IDevPortalBundleVerifier
{
    /// <summary>
    /// Verifies a DevPortal/EvidenceLocker sealed bundle.
    /// </summary>
    /// <param name="bundlePath">Path to the bundle .tgz file.</param>
    /// <param name="offline">If true, skip TSA verification and online checks.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Verification result with status and exit code.</returns>
    Task<DevPortalBundleVerificationResult> VerifyBundleAsync(
        string bundlePath,
        bool offline,
        CancellationToken cancellationToken);
}
|
||||
126
src/Cli/StellaOps.Cli/Services/Models/AttestationBundleModels.cs
Normal file
126
src/Cli/StellaOps.Cli/Services/Models/AttestationBundleModels.cs
Normal file
@@ -0,0 +1,126 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Cli.Services.Models;
|
||||
|
||||
/// <summary>
/// Options for attestation bundle verification.
/// </summary>
/// <param name="FilePath">Path to the attestation bundle (.tgz) file.</param>
/// <param name="Offline">When true, checks needing external services are skipped (e.g. missing transparency entries are tolerated).</param>
/// <param name="VerifyTransparency">When true, transparency log entries are required unless <paramref name="Offline"/> is set.</param>
/// <param name="TrustRootPath">Optional trust root for signature verification — expected format not visible here; TODO confirm.</param>
public sealed record AttestationBundleVerifyOptions(
    string FilePath,
    bool Offline = false,
    bool VerifyTransparency = true,
    string? TrustRootPath = null);
|
||||
|
||||
/// <summary>
/// Options for attestation bundle import.
/// </summary>
/// <param name="FilePath">Path to the attestation bundle (.tgz) file.</param>
/// <param name="Tenant">Target tenant; when null the tenant is taken from the bundle metadata.</param>
/// <param name="Namespace">Optional target namespace.</param>
/// <param name="Offline">When true, checks needing external services are skipped.</param>
/// <param name="VerifyTransparency">When true, transparency log entries are required unless <paramref name="Offline"/> is set.</param>
/// <param name="TrustRootPath">Optional trust root for signature verification — expected format not visible here; TODO confirm.</param>
public sealed record AttestationBundleImportOptions(
    string FilePath,
    string? Tenant = null,
    string? Namespace = null,
    bool Offline = false,
    bool VerifyTransparency = true,
    string? TrustRootPath = null);
|
||||
|
||||
/// <summary>
/// Result of attestation bundle verification.
/// </summary>
/// <param name="Success">True when verification passed.</param>
/// <param name="Status">Short status text, e.g. "verified".</param>
/// <param name="ExportId">Export identifier from bundle metadata.</param>
/// <param name="AttestationId">Attestation identifier from bundle metadata.</param>
/// <param name="RootHash">Root hash with "sha256:" prefix, when available.</param>
/// <param name="Subjects">Subject names from the in-toto statement.</param>
/// <param name="PredicateType">Predicate type URI from the statement.</param>
/// <param name="StatementVersion">Statement version from bundle metadata.</param>
/// <param name="BundlePath">Path of the verified bundle file.</param>
/// <param name="ErrorMessage">Error description when verification failed.</param>
/// <param name="ExitCode">Process exit code; see <see cref="AttestationBundleExitCodes"/>.</param>
public sealed record AttestationBundleVerifyResult(
    bool Success,
    string Status,
    string? ExportId,
    string? AttestationId,
    string? RootHash,
    IReadOnlyList<string>? Subjects,
    string? PredicateType,
    string? StatementVersion,
    string BundlePath,
    string? ErrorMessage = null,
    int ExitCode = 0);
|
||||
|
||||
/// <summary>
/// Result of attestation bundle import.
/// </summary>
/// <param name="Success">True when the import completed.</param>
/// <param name="Status">Short status text, e.g. "imported" or "verification_failed".</param>
/// <param name="AttestationId">Attestation identifier of the imported bundle.</param>
/// <param name="TenantId">Effective tenant; may come from the bundle metadata when not supplied.</param>
/// <param name="Namespace">Effective namespace, when set.</param>
/// <param name="RootHash">Root hash of the imported bundle, when available.</param>
/// <param name="ErrorMessage">Error description when the import failed.</param>
/// <param name="ExitCode">Process exit code; see <see cref="AttestationBundleExitCodes"/>.</param>
public sealed record AttestationBundleImportResult(
    bool Success,
    string Status,
    string? AttestationId,
    string? TenantId,
    string? Namespace,
    string? RootHash,
    string? ErrorMessage = null,
    int ExitCode = 0);
|
||||
|
||||
/// <summary>
/// JSON output for attestation bundle verify command.
/// Property names map one-to-one onto the emitted JSON keys.
/// </summary>
public sealed record AttestationBundleVerifyJson(
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("exportId")] string? ExportId,
    [property: JsonPropertyName("attestationId")] string? AttestationId,
    [property: JsonPropertyName("rootHash")] string? RootHash,
    [property: JsonPropertyName("subjects")] IReadOnlyList<string>? Subjects,
    [property: JsonPropertyName("predicateType")] string? PredicateType,
    [property: JsonPropertyName("bundlePath")] string BundlePath);
|
||||
|
||||
/// <summary>
/// JSON output for attestation bundle import command.
/// Property names map one-to-one onto the emitted JSON keys.
/// </summary>
public sealed record AttestationBundleImportJson(
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("attestationId")] string? AttestationId,
    [property: JsonPropertyName("tenantId")] string? TenantId,
    [property: JsonPropertyName("namespace")] string? Namespace,
    [property: JsonPropertyName("rootHash")] string? RootHash);
|
||||
|
||||
/// <summary>
/// Exit codes for attestation bundle commands.
/// </summary>
/// <remarks>
/// NOTE(review): codes 0, 2, 3 and 4 appear to mirror
/// <see cref="DevPortalVerifyExitCode"/> (Success/ChecksumMismatch/
/// SignatureFailure/TsaMissing) so the two verify commands report comparable
/// failures — confirm this alignment is intentional before renumbering.
/// </remarks>
public static class AttestationBundleExitCodes
{
    /// <summary>Success.</summary>
    public const int Success = 0;

    /// <summary>General failure.</summary>
    public const int GeneralFailure = 1;

    /// <summary>Checksum mismatch.</summary>
    public const int ChecksumMismatch = 2;

    /// <summary>DSSE signature verification failure.</summary>
    public const int SignatureFailure = 3;

    /// <summary>Missing required TSA/CT log entry.</summary>
    public const int MissingTransparency = 4;

    /// <summary>Archive or file format error.</summary>
    public const int FormatError = 5;

    /// <summary>File not found.</summary>
    public const int FileNotFound = 6;

    /// <summary>Import failed.</summary>
    public const int ImportFailed = 7;
}
|
||||
|
||||
/// <summary>
/// Metadata parsed from an attestation bundle.
/// </summary>
/// <param name="Version">Bundle format version, e.g. "attestation-bundle/v1".</param>
/// <param name="ExportId">Export identifier.</param>
/// <param name="AttestationId">Attestation identifier.</param>
/// <param name="TenantId">Owning tenant identifier.</param>
/// <param name="CreatedAtUtc">Bundle creation timestamp (UTC).</param>
/// <param name="RootHash">Root hash recorded in the metadata.</param>
/// <param name="SourceUri">Source URI, when recorded.</param>
/// <param name="StatementVersion">Version of the in-toto statement.</param>
/// <param name="SubjectDigests">Subject digests listed in the metadata.</param>
internal sealed record AttestationBundleMetadata(
    string? Version,
    string? ExportId,
    string? AttestationId,
    string? TenantId,
    DateTimeOffset? CreatedAtUtc,
    string? RootHash,
    string? SourceUri,
    string? StatementVersion,
    IReadOnlyList<AttestationBundleSubjectDigest>? SubjectDigests);
|
||||
|
||||
/// <summary>
/// Subject digest from attestation bundle metadata.
/// </summary>
/// <param name="Name">Subject name (e.g. an image reference).</param>
/// <param name="Digest">Digest value.</param>
/// <param name="Algorithm">Digest algorithm — expected values not visible here; TODO confirm.</param>
internal sealed record AttestationBundleSubjectDigest(
    string? Name,
    string? Digest,
    string? Algorithm);
|
||||
@@ -0,0 +1,406 @@
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Cli.Services;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
|
||||
namespace StellaOps.Cli.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="AttestationBundleVerifier"/> covering the verify
/// and import flows against synthetic .tgz bundles built on the fly.
/// </summary>
public sealed class AttestationBundleVerifierTests : IDisposable
{
    // Per-test-class scratch directory for generated bundles; removed in Dispose.
    private readonly string _tempDir;
    private readonly AttestationBundleVerifier _verifier;

    public AttestationBundleVerifierTests()
    {
        _tempDir = Path.Combine(Path.GetTempPath(), $"attest-bundle-test-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_tempDir);

        _verifier = new AttestationBundleVerifier(NullLogger<AttestationBundleVerifier>.Instance);
    }

    public void Dispose()
    {
        if (Directory.Exists(_tempDir))
        {
            Directory.Delete(_tempDir, recursive: true);
        }
    }

    [Fact]
    public async Task VerifyAsync_FileNotFound_ReturnsFileNotFoundCode()
    {
        var options = new AttestationBundleVerifyOptions(
            Path.Combine(_tempDir, "nonexistent.tgz"),
            Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.FileNotFound, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_ValidBundle_ReturnsSuccess()
    {
        var bundlePath = await CreateValidBundleAsync();

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
        Assert.Equal("verified", result.Status);
    }

    [Fact]
    public async Task VerifyAsync_ValidBundle_ReturnsMetadata()
    {
        var bundlePath = await CreateValidBundleAsync();

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.NotNull(result.ExportId);
        Assert.NotNull(result.AttestationId);
        Assert.NotNull(result.RootHash);
        Assert.StartsWith("sha256:", result.RootHash);
    }

    [Fact]
    public async Task VerifyAsync_CorruptedArchive_ReturnsFormatError()
    {
        // Not a gzip stream at all — must surface as a format error, not a crash.
        var bundlePath = Path.Combine(_tempDir, "corrupted.tgz");
        await File.WriteAllBytesAsync(bundlePath, Encoding.UTF8.GetBytes("not a valid tgz"));

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.FormatError, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_ChecksumMismatch_ReturnsChecksumMismatchCode()
    {
        var bundlePath = await CreateBundleWithBadChecksumAsync();

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.ChecksumMismatch, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_ExternalChecksumMismatch_ReturnsChecksumMismatchCode()
    {
        // A sidecar .sha256 file with a wrong hash must fail the whole bundle.
        var bundlePath = await CreateValidBundleAsync();
        var checksumPath = bundlePath + ".sha256";
        await File.WriteAllTextAsync(checksumPath, "0000000000000000000000000000000000000000000000000000000000000000 " + Path.GetFileName(bundlePath));

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.ChecksumMismatch, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_MissingTransparency_WhenNotOffline_ReturnsMissingTransparencyCode()
    {
        var bundlePath = await CreateBundleWithoutTransparencyAsync();

        var options = new AttestationBundleVerifyOptions(
            bundlePath,
            Offline: false,
            VerifyTransparency: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.MissingTransparency, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_MissingTransparency_WhenOffline_ReturnsSuccess()
    {
        // Offline mode tolerates an absent transparency.ndjson.
        var bundlePath = await CreateBundleWithoutTransparencyAsync();

        var options = new AttestationBundleVerifyOptions(
            bundlePath,
            Offline: true,
            VerifyTransparency: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_MissingDssePayload_ReturnsSignatureFailure()
    {
        var bundlePath = await CreateBundleWithMissingDssePayloadAsync();

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.SignatureFailure, result.ExitCode);
    }

    [Fact]
    public async Task ImportAsync_ValidBundle_ReturnsSuccess()
    {
        var bundlePath = await CreateValidBundleAsync();

        var options = new AttestationBundleImportOptions(
            bundlePath,
            Tenant: "test-tenant",
            Namespace: "test-namespace",
            Offline: true);

        var result = await _verifier.ImportAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
        Assert.Equal("imported", result.Status);
    }

    [Fact]
    public async Task ImportAsync_InvalidBundle_ReturnsVerificationFailed()
    {
        var bundlePath = Path.Combine(_tempDir, "invalid.tgz");
        await File.WriteAllBytesAsync(bundlePath, Encoding.UTF8.GetBytes("not valid"));

        var options = new AttestationBundleImportOptions(
            bundlePath,
            Tenant: "test-tenant",
            Offline: true);

        var result = await _verifier.ImportAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal("verification_failed", result.Status);
    }

    [Fact]
    public async Task ImportAsync_InheritsTenantFromMetadata()
    {
        var bundlePath = await CreateValidBundleAsync();

        var options = new AttestationBundleImportOptions(
            bundlePath,
            Tenant: null, // Not specified
            Offline: true);

        var result = await _verifier.ImportAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.NotNull(result.TenantId); // Should come from bundle metadata
    }

    // Builds a fully well-formed bundle: DSSE envelope, statement, metadata,
    // a transparency entry, and a checksums.txt that matches every file.
    private async Task<string> CreateValidBundleAsync()
    {
        var bundlePath = Path.Combine(_tempDir, $"valid-bundle-{Guid.NewGuid():N}.tgz");
        var exportId = Guid.NewGuid().ToString("D");
        var attestationId = Guid.NewGuid().ToString("D");
        var tenantId = Guid.NewGuid().ToString("D");

        // Create statement JSON
        var statement = new
        {
            _type = "https://in-toto.io/Statement/v1",
            predicateType = "https://stellaops.io/attestations/vuln-scan/v1",
            subject = new[]
            {
                new { name = "test-image:latest", digest = new Dictionary<string, string> { ["sha256"] = "abc123" } }
            },
            predicate = new { }
        };
        var statementJson = JsonSerializer.Serialize(statement);
        var statementBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson));

        // Create DSSE envelope
        var dsse = new
        {
            payloadType = "application/vnd.in-toto+json",
            payload = statementBase64,
            signatures = new[]
            {
                new { keyid = "key-001", sig = "fake-signature-for-test" }
            }
        };
        var dsseJson = JsonSerializer.Serialize(dsse);

        // Create metadata
        var metadata = new
        {
            version = "attestation-bundle/v1",
            exportId,
            attestationId,
            tenantId,
            createdAtUtc = DateTimeOffset.UtcNow.ToString("O"),
            rootHash = "abc123def456",
            statementVersion = "v1"
        };
        var metadataJson = JsonSerializer.Serialize(metadata);

        // Create transparency entries
        var transparencyNdjson = "{\"logIndex\":1,\"logId\":\"test\"}\n";

        // Calculate checksums
        var dsseHash = ComputeHash(dsseJson);
        var statementHash = ComputeHash(statementJson);
        var metadataHash = ComputeHash(metadataJson);
        var transparencyHash = ComputeHash(transparencyNdjson);

        var checksums = new StringBuilder();
        checksums.AppendLine("# Attestation bundle checksums (sha256)");
        checksums.AppendLine($"{dsseHash} attestation.dsse.json");
        checksums.AppendLine($"{metadataHash} metadata.json");
        checksums.AppendLine($"{statementHash} statement.json");
        checksums.AppendLine($"{transparencyHash} transparency.ndjson");
        var checksumsText = checksums.ToString();

        // Create archive
        await using var fileStream = File.Create(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
        await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);

        await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
        await WriteEntryAsync(tarWriter, "checksums.txt", checksumsText);
        await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
        await WriteEntryAsync(tarWriter, "statement.json", statementJson);
        await WriteEntryAsync(tarWriter, "transparency.ndjson", transparencyNdjson);

        return bundlePath;
    }

    // Like CreateValidBundleAsync but deliberately omits transparency.ndjson.
    private async Task<string> CreateBundleWithoutTransparencyAsync()
    {
        var bundlePath = Path.Combine(_tempDir, $"no-transparency-{Guid.NewGuid():N}.tgz");

        var statement = new
        {
            _type = "https://in-toto.io/Statement/v1",
            predicateType = "https://stellaops.io/attestations/vuln-scan/v1",
            subject = new[] { new { name = "test", digest = new Dictionary<string, string> { ["sha256"] = "abc" } } }
        };
        var statementJson = JsonSerializer.Serialize(statement);
        var statementBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson));

        var dsse = new
        {
            payloadType = "application/vnd.in-toto+json",
            payload = statementBase64,
            signatures = new[] { new { keyid = "key-001", sig = "fake-sig" } }
        };
        var dsseJson = JsonSerializer.Serialize(dsse);

        var metadata = new
        {
            version = "attestation-bundle/v1",
            exportId = Guid.NewGuid().ToString("D"),
            attestationId = Guid.NewGuid().ToString("D"),
            tenantId = Guid.NewGuid().ToString("D"),
            rootHash = "abc123"
        };
        var metadataJson = JsonSerializer.Serialize(metadata);

        var dsseHash = ComputeHash(dsseJson);
        var statementHash = ComputeHash(statementJson);
        var metadataHash = ComputeHash(metadataJson);

        var checksums = $"# Checksums\n{dsseHash} attestation.dsse.json\n{metadataHash} metadata.json\n{statementHash} statement.json\n";

        await using var fileStream = File.Create(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
        await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);

        await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
        await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
        await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
        await WriteEntryAsync(tarWriter, "statement.json", statementJson);
        // No transparency.ndjson

        return bundlePath;
    }

    // Bundle whose checksums.txt deliberately lists a wrong digest.
    private async Task<string> CreateBundleWithBadChecksumAsync()
    {
        var bundlePath = Path.Combine(_tempDir, $"bad-checksum-{Guid.NewGuid():N}.tgz");

        var dsseJson = "{\"payloadType\":\"test\",\"payload\":\"dGVzdA==\",\"signatures\":[{\"keyid\":\"k\",\"sig\":\"s\"}]}";
        var statementJson = "{\"_type\":\"test\"}";
        var metadataJson = "{\"version\":\"v1\"}";

        // Intentionally wrong checksum
        var checksums = "0000000000000000000000000000000000000000000000000000000000000000 attestation.dsse.json\n";

        await using var fileStream = File.Create(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
        await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);

        await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
        await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
        await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
        await WriteEntryAsync(tarWriter, "statement.json", statementJson);

        return bundlePath;
    }

    // Bundle with a DSSE envelope lacking the "payload" field (checksums valid).
    private async Task<string> CreateBundleWithMissingDssePayloadAsync()
    {
        var bundlePath = Path.Combine(_tempDir, $"no-dsse-payload-{Guid.NewGuid():N}.tgz");

        // DSSE without payload
        var dsseJson = "{\"payloadType\":\"test\",\"signatures\":[]}";
        var statementJson = "{\"_type\":\"test\"}";
        var metadataJson = "{\"version\":\"v1\"}";

        var dsseHash = ComputeHash(dsseJson);
        var statementHash = ComputeHash(statementJson);
        var metadataHash = ComputeHash(metadataJson);
        var checksums = $"{dsseHash} attestation.dsse.json\n{metadataHash} metadata.json\n{statementHash} statement.json\n";

        await using var fileStream = File.Create(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
        await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);

        await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
        await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
        await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
        await WriteEntryAsync(tarWriter, "statement.json", statementJson);

        return bundlePath;
    }

    // Writes a single UTF-8 text file into the tar archive as a PAX entry.
    private static async Task WriteEntryAsync(TarWriter writer, string name, string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        using var dataStream = new MemoryStream(bytes);
        var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
        {
            DataStream = dataStream
        };
        await writer.WriteEntryAsync(entry);
    }

    // Lowercase hex SHA-256 of the UTF-8 bytes of content — the same format
    // the checksums.txt fixtures use.
    private static string ComputeHash(string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        var hash = System.Security.Cryptography.SHA256.HashData(bytes);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
|
||||
@@ -0,0 +1,316 @@
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Cli.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Cli.Tests.Services;
|
||||
|
||||
public sealed class DevPortalBundleVerifierTests : IDisposable
|
||||
{
|
||||
private readonly string _tempDir;
|
||||
private readonly DevPortalBundleVerifier _verifier;
|
||||
|
||||
public DevPortalBundleVerifierTests()
|
||||
{
|
||||
_tempDir = Path.Combine(Path.GetTempPath(), $"devportal-test-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_tempDir);
|
||||
_verifier = new DevPortalBundleVerifier(NullLogger<DevPortalBundleVerifier>.Instance);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_tempDir))
|
||||
{
|
||||
Directory.Delete(_tempDir, recursive: true);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyBundleAsync_ReturnsSuccess_ForValidBundle()
|
||||
{
|
||||
var bundlePath = CreateValidBundle();
|
||||
|
||||
var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);
|
||||
|
||||
Assert.Equal("verified", result.Status);
|
||||
Assert.Equal(DevPortalVerifyExitCode.Success, result.ExitCode);
|
||||
Assert.Equal("a1b2c3d4-e5f6-7890-abcd-ef1234567890", result.BundleId);
|
||||
Assert.NotNull(result.RootHash);
|
||||
Assert.True(result.RootHash!.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase));
|
||||
Assert.Equal(1, result.Entries);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyBundleAsync_ReturnsUnexpected_WhenBundleNotFound()
|
||||
{
|
||||
var nonExistentPath = Path.Combine(_tempDir, "nonexistent.tgz");
|
||||
|
||||
var result = await _verifier.VerifyBundleAsync(nonExistentPath, offline: true, CancellationToken.None);
|
||||
|
||||
Assert.Equal("failed", result.Status);
|
||||
Assert.Equal(DevPortalVerifyExitCode.Unexpected, result.ExitCode);
|
||||
Assert.Contains("not found", result.ErrorMessage, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
[Fact]
public async Task VerifyBundleAsync_ReturnsChecksumMismatch_WhenSha256DoesNotMatch()
{
    var bundlePath = CreateValidBundle();
    var sha256Path = bundlePath + ".sha256";

    // Write a sidecar whose digest cannot match the bundle. Use the bundle's
    // real file name so the digest is the only discrepancy (the original
    // fixture hard-coded "bundle.tgz", which never matched the randomized
    // bundle-<guid>.tgz name).
    var bogusDigest = new string('0', 64);
    await File.WriteAllTextAsync(sha256Path, $"{bogusDigest} {Path.GetFileName(bundlePath)}");

    var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);

    Assert.Equal("failed", result.Status);
    Assert.Equal(DevPortalVerifyExitCode.ChecksumMismatch, result.ExitCode);
}
|
||||
|
||||
[Fact]
public async Task VerifyBundleAsync_SucceedsWithoutSha256File()
{
    // The sidecar checksum file is optional — verification must not require it.
    var bundlePath = CreateValidBundle();

    var checksumSidecar = bundlePath + ".sha256";
    if (File.Exists(checksumSidecar))
    {
        File.Delete(checksumSidecar);
    }

    var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);

    Assert.Equal("verified", result.Status);
    Assert.Equal(DevPortalVerifyExitCode.Success, result.ExitCode);
}
|
||||
|
||||
[Fact]
public async Task VerifyBundleAsync_ReturnsTsaMissing_WhenOnlineAndNoTimestamp()
{
    // Fixture whose signature envelope omits all timestamp fields.
    var untimestampedBundle = CreateBundleWithoutTimestamp();

    // offline: false — the timestamp requirement applies in online mode.
    var result = await _verifier.VerifyBundleAsync(untimestampedBundle, offline: false, CancellationToken.None);

    Assert.Equal("failed", result.Status);
    Assert.Equal(DevPortalVerifyExitCode.TsaMissing, result.ExitCode);
}
|
||||
|
||||
[Fact]
public async Task VerifyBundleAsync_DetectsPortableBundle()
{
    // Fixture whose bundle metadata carries the portable marker.
    var portableBundle = CreatePortableBundle();

    var result = await _verifier.VerifyBundleAsync(portableBundle, offline: true, CancellationToken.None);

    Assert.Equal("verified", result.Status);
    Assert.True(result.Portable);
}
|
||||
|
||||
[Fact]
public void ToJson_OutputsKeysSortedAlphabetically()
{
    // Arrange: a fully-populated result so every serialized key is present.
    var result = new DevPortalBundleVerificationResult
    {
        Status = "verified",
        BundleId = "test-id",
        RootHash = "sha256:abc123",
        Entries = 3,
        CreatedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
        Portable = false,
        ExitCode = DevPortalVerifyExitCode.Success
    };

    var json = result.ToJson();

    // JsonDocument is IDisposable — the original parsed it inline and leaked it.
    using var document = JsonDocument.Parse(json);
    var keys = document.RootElement.EnumerateObject()
        .Select(p => p.Name)
        .ToList();

    // Keys must already be in ordinal alphabetical order.
    var sortedKeys = keys.OrderBy(k => k, StringComparer.Ordinal).ToList();
    Assert.Equal(sortedKeys, keys);
}
|
||||
|
||||
// Writes a structurally complete bundle (.tgz) into the per-test temp dir and
// returns its path: manifest.json (one sbom entry), signature.json (signed,
// with timestamp fields), bundle.json metadata, checksums.txt.
// The bundleId here is the value asserted by
// VerifyBundleAsync_ReturnsSuccess_ForValidBundle.
private string CreateValidBundle()
{
    var bundlePath = Path.Combine(_tempDir, $"bundle-{Guid.NewGuid():N}.tgz");

    // Manifest payload: a single SBOM entry with a fixed 64-char hex digest.
    // NOTE(review): kind = 2 is a raw numeric enum value — confirm against
    // the DevPortal bundle model.
    var manifest = new
    {
        bundleId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
        tenantId = "00000000-0000-0000-0000-000000000001",
        kind = 2,
        createdAt = "2025-12-07T10:30:00Z",
        metadata = new Dictionary<string, string> { ["source"] = "test" },
        entries = new[]
        {
            new
            {
                section = "sbom",
                canonicalPath = "sbom/cyclonedx.json",
                sha256 = new string('a', 64),
                sizeBytes = 1024,
                mediaType = "application/vnd.cyclonedx+json"
            }
        }
    };

    var manifestJson = JsonSerializer.Serialize(manifest, new JsonSerializerOptions { WriteIndented = false });
    // The signature envelope carries the manifest as a base64 payload.
    var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));

    // Signature envelope including the timestamp fields (timestampedAt /
    // timestampAuthority / timestampToken) that distinguish this fixture
    // from CreateBundleWithoutTimestamp.
    var signature = new
    {
        payloadType = "application/vnd.stella.evidence.manifest+json",
        payload = manifestPayload,
        signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
        keyId = "key-1",
        algorithm = "ES256",
        provider = "StellaOps",
        signedAt = "2025-12-07T10:30:05Z",
        timestampedAt = "2025-12-07T10:30:06Z",
        timestampAuthority = "https://freetsa.org/tsr",
        timestampToken = Convert.ToBase64String(Encoding.UTF8.GetBytes("tsa-token"))
    };

    // Bundle-level metadata. NOTE(review): status = 3 is presumably the
    // "sealed" state — confirm against the model before relying on it.
    var bundleMetadata = new
    {
        bundleId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
        tenantId = "00000000-0000-0000-0000-000000000001",
        kind = 2,
        status = 3,
        rootHash = new string('f', 64),
        storageKey = "evidence/bundle.tgz",
        createdAt = "2025-12-07T10:30:00Z",
        sealedAt = "2025-12-07T10:30:05Z"
    };

    CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);

    return bundlePath;
}
|
||||
|
||||
// Like CreateValidBundle, but the signature envelope omits every timestamp
// field. Exercises the TSA-missing path of online verification.
private string CreateBundleWithoutTimestamp()
{
    var bundlePath = Path.Combine(_tempDir, $"bundle-no-tsa-{Guid.NewGuid():N}.tgz");

    // Minimal manifest with no entries.
    var manifest = new
    {
        bundleId = "b2c3d4e5-f6a7-8901-bcde-f23456789012",
        tenantId = "00000000-0000-0000-0000-000000000001",
        kind = 2,
        createdAt = "2025-12-07T10:30:00Z",
        entries = Array.Empty<object>()
    };

    var manifestJson = JsonSerializer.Serialize(manifest);
    var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));

    // Signed envelope with NO timestampedAt / timestampAuthority /
    // timestampToken — the property under test.
    var signature = new
    {
        payloadType = "application/vnd.stella.evidence.manifest+json",
        payload = manifestPayload,
        signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
        keyId = "key-1",
        algorithm = "ES256",
        provider = "StellaOps",
        signedAt = "2025-12-07T10:30:05Z"
        // No timestampedAt, timestampAuthority, timestampToken
    };

    var bundleMetadata = new
    {
        bundleId = "b2c3d4e5-f6a7-8901-bcde-f23456789012",
        tenantId = "00000000-0000-0000-0000-000000000001",
        kind = 2,
        status = 3,
        rootHash = new string('e', 64),
        storageKey = "evidence/bundle.tgz",
        createdAt = "2025-12-07T10:30:00Z",
        sealedAt = "2025-12-07T10:30:05Z"
    };

    CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);

    return bundlePath;
}
|
||||
|
||||
// Builds a bundle whose metadata marks it as portable (portableGeneratedAt
// is set and no tenantId/storageKey is present). Exercises the
// Portable-detection path of the verifier.
private string CreatePortableBundle()
{
    var bundlePath = Path.Combine(_tempDir, $"portable-{Guid.NewGuid():N}.tgz");

    // Minimal manifest; note kind = 1 here vs kind = 2 in the other fixtures.
    var manifest = new
    {
        bundleId = "c3d4e5f6-a7b8-9012-cdef-345678901234",
        kind = 1,
        createdAt = "2025-12-07T10:30:00Z",
        entries = Array.Empty<object>()
    };

    var manifestJson = JsonSerializer.Serialize(manifest);
    var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));

    // Fully timestamped signature so verification itself succeeds.
    var signature = new
    {
        payloadType = "application/vnd.stella.evidence.manifest+json",
        payload = manifestPayload,
        signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
        keyId = "key-1",
        algorithm = "ES256",
        provider = "StellaOps",
        signedAt = "2025-12-07T10:30:05Z",
        timestampedAt = "2025-12-07T10:30:06Z",
        timestampAuthority = "tsa.default",
        timestampToken = Convert.ToBase64String(Encoding.UTF8.GetBytes("tsa-token"))
    };

    var bundleMetadata = new
    {
        bundleId = "c3d4e5f6-a7b8-9012-cdef-345678901234",
        kind = 1,
        status = 3,
        rootHash = new string('d', 64),
        createdAt = "2025-12-07T10:30:00Z",
        sealedAt = "2025-12-07T10:30:05Z",
        portableGeneratedAt = "2025-12-07T10:35:00Z" // Indicates portable bundle
    };

    CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);

    return bundlePath;
}
|
||||
|
||||
// Writes a gzip-compressed tar to bundlePath containing the four canonical
// bundle files: manifest.json, signature.json, bundle.json, checksums.txt.
// The signature and metadata objects are serialized with default options.
private static void CreateTgzBundle(string bundlePath, string manifestJson, object signature, object bundleMetadata)
{
    // Stream straight into the destination file (no intermediate buffer);
    // the resulting bytes are identical either way.
    using var fileStream = File.Create(bundlePath);
    using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal, leaveOpen: true);
    using var tarWriter = new TarWriter(gzipStream);

    AddTarEntry(tarWriter, "manifest.json", manifestJson);
    AddTarEntry(tarWriter, "signature.json", JsonSerializer.Serialize(signature));
    AddTarEntry(tarWriter, "bundle.json", JsonSerializer.Serialize(bundleMetadata));
    AddTarEntry(tarWriter, "checksums.txt", $"# checksums\n{new string('f', 64)} sbom/cyclonedx.json\n");
}
|
||||
|
||||
// Appends a regular-file PAX entry with UTF-8 content to the tar stream.
// Mode and modification time are fixed so the produced archive bytes are
// deterministic across runs.
private static void AddTarEntry(TarWriter writer, string name, string content)
{
    var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
    {
        Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
        ModificationTime = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero)
    };

    // Dispose the backing stream once the entry has been written; the
    // original never disposed it (harmless for MemoryStream, but sloppy).
    using var dataStream = new MemoryStream(Encoding.UTF8.GetBytes(content));
    entry.DataStream = dataStream;
    writer.WriteEntry(entry);
}
|
||||
}
|
||||
34
src/Concelier/Directory.Build.props
Normal file
34
src/Concelier/Directory.Build.props
Normal file
@@ -0,0 +1,34 @@
|
||||
<Project>
  <PropertyGroup>
    <!-- Keep Concelier test harness active while trimming Mongo dependencies. Allow opt-out per project. -->
    <UseConcelierTestInfra Condition="'$(UseConcelierTestInfra)'==''">true</UseConcelierTestInfra>
    <!-- Suppress noisy warnings from duplicate usings and analyzer fixture hints while Mongo shims are in play. -->
    <NoWarn>$(NoWarn);CS0105;RS1032;RS2007;xUnit1041;NU1510</NoWarn>
  </PropertyGroup>
  <ItemGroup>
    <!-- Concelier is migrating off MongoDB; strip implicit Mongo2Go/Mongo driver packages inherited from the repo root. -->
    <PackageReference Remove="Mongo2Go" />
    <PackageReference Remove="MongoDB.Driver" />
  </ItemGroup>
  <!-- Standard test stack for *.Tests projects under this tree; also wires in the
       shared Concelier.Testing library when it exists on disk. -->
  <ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)'=='true'">
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
    <ProjectReference Include="$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj"
                      Condition="Exists('$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj')" />
    <Using Include="StellaOps.Concelier.Testing"
           Condition="Exists('$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj')" />
    <Using Include="Xunit" />
  </ItemGroup>
  <!-- Keep OpenSSL shim sources available to Mongo2Go-free test harnesses if needed. -->
  <ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)'=='true'">
    <None Include="$(MSBuildThisFileDirectory)..\..\tests\native\openssl-1.1\linux-x64\*.so.1.1"
          Link="native/linux-x64/%(Filename)%(Extension)"
          CopyToOutputDirectory="PreserveNewest" />
    <Compile Include="$(MSBuildThisFileDirectory)..\..\tests\shared\OpenSslLegacyShim.cs" Link="Shared/OpenSslLegacyShim.cs" />
    <Compile Include="$(MSBuildThisFileDirectory)..\..\tests\shared\OpenSslAutoInit.cs" Link="Shared/OpenSslAutoInit.cs" />
  </ItemGroup>
</Project>
|
||||
@@ -30,7 +30,7 @@ public sealed class RawDocumentStorage
|
||||
string uri,
|
||||
byte[] content,
|
||||
string? contentType,
|
||||
DateTimeOffset? expiresAt,
|
||||
DateTimeOffset? ExpiresAt,
|
||||
CancellationToken cancellationToken,
|
||||
Guid? documentId = null)
|
||||
{
|
||||
|
||||
@@ -418,7 +418,7 @@ public sealed class UbuntuConnector : IFeedConnector
|
||||
await _stateRepository.UpdateCursorAsync(SourceName, doc, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private static string ComputeNoticeHash(BsonDocument document)
|
||||
private string ComputeNoticeHash(BsonDocument document)
|
||||
{
|
||||
var bytes = document.ToBson();
|
||||
var hash = _hash.ComputeHash(bytes, HashAlgorithms.Sha256);
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Linksets
{
    /// <summary>
    /// Maps an advisory linkset into the flat signal shape consumed by policy auth.
    /// </summary>
    public static class PolicyAuthSignalFactory
    {
        /// <summary>
        /// Builds a "reachability" signal from <paramref name="linkset"/>.
        /// The subject is the first normalized purl when present, otherwise the advisory id.
        /// </summary>
        /// <param name="linkset">Source linkset; must not be null.</param>
        /// <returns>A signal carrying a single urn:linkset evidence pointer.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="linkset"/> is null.</exception>
        public static PolicyAuthSignal ToPolicyAuthSignal(AdvisoryLinkset linkset)
        {
            // Throw-helper idiom replaces the manual null check.
            ArgumentNullException.ThrowIfNull(linkset);

            var subject = linkset.Normalized?.Purls?.FirstOrDefault() ?? linkset.AdvisoryId;
            var evidenceUri = $"urn:linkset:{linkset.AdvisoryId}";

            return new PolicyAuthSignal(
                Id: linkset.AdvisoryId,
                Tenant: linkset.TenantId,
                Subject: subject ?? string.Empty,
                Source: linkset.Source,
                SignalType: "reachability",
                Evidence: new[]
                {
                    new PolicyAuthEvidence(evidenceUri)
                });
        }
    }

    /// <summary>Flat signal record consumed by policy auth evaluation.</summary>
    public sealed record PolicyAuthSignal(
        string Id,
        string Tenant,
        string Subject,
        string Source,
        string SignalType,
        IReadOnlyList<PolicyAuthEvidence> Evidence);

    /// <summary>Single evidence pointer attached to a signal.</summary>
    public sealed record PolicyAuthEvidence(string Uri);
}
|
||||
@@ -1,42 +1,36 @@
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace MongoDB.Bson
|
||||
{
|
||||
public readonly struct ObjectId : IEquatable<ObjectId>
|
||||
public enum BsonType
|
||||
{
|
||||
public Guid Value { get; }
|
||||
public ObjectId(Guid value) => Value = value;
|
||||
public ObjectId(string value) => Value = Guid.TryParse(value, out var g) ? g : Guid.Empty;
|
||||
public static ObjectId GenerateNewId() => new(Guid.NewGuid());
|
||||
public static ObjectId Empty => new(Guid.Empty);
|
||||
public bool Equals(ObjectId other) => Value.Equals(other.Value);
|
||||
public override bool Equals(object? obj) => obj is ObjectId other && Equals(other);
|
||||
public override int GetHashCode() => Value.GetHashCode();
|
||||
public override string ToString() => Value.ToString("N");
|
||||
public static bool operator ==(ObjectId left, ObjectId right) => left.Equals(right);
|
||||
public static bool operator !=(ObjectId left, ObjectId right) => !left.Equals(right);
|
||||
Double,
|
||||
String,
|
||||
Document,
|
||||
Array,
|
||||
Binary,
|
||||
ObjectId,
|
||||
Boolean,
|
||||
DateTime,
|
||||
Null,
|
||||
Int32,
|
||||
Int64
|
||||
}
|
||||
|
||||
public enum BsonType { Document, Array, String, Boolean, Int32, Int64, Double, DateTime, Guid, Null }
|
||||
|
||||
public class BsonValue
|
||||
public class BsonValue : IEquatable<BsonValue?>
|
||||
{
|
||||
protected readonly object? _value;
|
||||
public BsonValue(object? value) => _value = value;
|
||||
internal object? RawValue => _value;
|
||||
public static implicit operator BsonValue(string value) => new BsonString(value ?? string.Empty);
|
||||
public static implicit operator BsonValue(bool value) => new BsonBoolean(value);
|
||||
public static implicit operator BsonValue(int value) => new BsonInt32(value);
|
||||
public static implicit operator BsonValue(long value) => new BsonInt64(value);
|
||||
public static implicit operator BsonValue(double value) => new BsonDouble(value);
|
||||
public static implicit operator BsonValue(DateTime value) => new BsonDateTime(DateTime.SpecifyKind(value, DateTimeKind.Utc));
|
||||
public static implicit operator BsonValue(DateTimeOffset value) => new BsonDateTime(value.UtcDateTime);
|
||||
public static implicit operator BsonValue(Guid value) => new BsonString(value.ToString("D"));
|
||||
public static BsonValue Create(object? value) => BsonDocument.WrapExternal(value);
|
||||
public virtual BsonType BsonType => _value switch
|
||||
protected object? RawValue;
|
||||
|
||||
public BsonValue(object? value = null)
|
||||
{
|
||||
RawValue = value;
|
||||
}
|
||||
|
||||
public virtual BsonType BsonType => RawValue switch
|
||||
{
|
||||
null => BsonType.Null,
|
||||
BsonDocument => BsonType.Document,
|
||||
@@ -45,51 +39,390 @@ namespace MongoDB.Bson
|
||||
bool => BsonType.Boolean,
|
||||
int => BsonType.Int32,
|
||||
long => BsonType.Int64,
|
||||
double => BsonType.Double,
|
||||
DateTime => BsonType.DateTime,
|
||||
DateTimeOffset => BsonType.DateTime,
|
||||
Guid => BsonType.Guid,
|
||||
_ => BsonType.Null
|
||||
double or float or decimal => BsonType.Double,
|
||||
DateTime or DateTimeOffset => BsonType.DateTime,
|
||||
ObjectId => BsonType.ObjectId,
|
||||
byte[] => BsonType.Binary,
|
||||
Guid => BsonType.String,
|
||||
_ => BsonType.String
|
||||
};
|
||||
public bool IsString => _value is string;
|
||||
public bool IsBsonDocument => _value is BsonDocument;
|
||||
public bool IsBsonArray => _value is BsonArray;
|
||||
public bool IsBsonNull => _value is null;
|
||||
public string AsString => _value?.ToString() ?? string.Empty;
|
||||
public BsonDocument AsBsonDocument => _value as BsonDocument ?? throw new InvalidCastException();
|
||||
public BsonArray AsBsonArray => _value as BsonArray ?? throw new InvalidCastException();
|
||||
public Guid AsGuid => _value is Guid g ? g : Guid.Empty;
|
||||
public DateTime AsDateTime => _value switch
|
||||
|
||||
public bool IsString => BsonType == BsonType.String;
|
||||
public bool IsBoolean => BsonType == BsonType.Boolean;
|
||||
public bool IsBsonDocument => BsonType == BsonType.Document;
|
||||
public bool IsBsonArray => BsonType == BsonType.Array;
|
||||
public bool IsBsonNull => BsonType == BsonType.Null;
|
||||
public bool IsBsonDateTime => BsonType == BsonType.DateTime;
|
||||
public bool IsInt32 => BsonType == BsonType.Int32;
|
||||
public bool IsInt64 => BsonType == BsonType.Int64;
|
||||
|
||||
public BsonValue this[string key] => AsBsonDocument[key];
|
||||
public BsonValue this[int index] => AsBsonArray[index];
|
||||
|
||||
public string AsString => RawValue switch
|
||||
{
|
||||
DateTimeOffset dto => dto.UtcDateTime,
|
||||
DateTime dt => dt,
|
||||
_ => DateTime.MinValue
|
||||
null => string.Empty,
|
||||
string s => s,
|
||||
Guid g => g.ToString(),
|
||||
ObjectId o => o.ToString(),
|
||||
_ => Convert.ToString(RawValue, CultureInfo.InvariantCulture) ?? string.Empty
|
||||
};
|
||||
public int AsInt32 => _value is int i ? i : 0;
|
||||
public long AsInt64 => _value is long l ? l : 0;
|
||||
public double AsDouble => _value is double d ? d : 0d;
|
||||
public bool AsBoolean => _value is bool b && b;
|
||||
public bool IsInt32 => _value is int;
|
||||
public DateTime ToUniversalTime() => _value switch
|
||||
|
||||
public bool AsBoolean => RawValue switch
|
||||
{
|
||||
bool b => b,
|
||||
string s when bool.TryParse(s, out var b) => b,
|
||||
int i => i != 0,
|
||||
long l => l != 0,
|
||||
_ => false
|
||||
};
|
||||
|
||||
public int ToInt32() => RawValue switch
|
||||
{
|
||||
int i => i,
|
||||
long l => (int)l,
|
||||
double d => (int)d,
|
||||
string s when int.TryParse(s, NumberStyles.Any, CultureInfo.InvariantCulture, out var i) => i,
|
||||
_ => 0
|
||||
};
|
||||
|
||||
public int AsInt32 => ToInt32();
|
||||
|
||||
public long AsInt64 => RawValue switch
|
||||
{
|
||||
long l => l,
|
||||
int i => i,
|
||||
double d => (long)d,
|
||||
string s when long.TryParse(s, NumberStyles.Any, CultureInfo.InvariantCulture, out var l) => l,
|
||||
_ => 0L
|
||||
};
|
||||
|
||||
public Guid AsGuid => RawValue switch
|
||||
{
|
||||
Guid g => g,
|
||||
string s when Guid.TryParse(s, out var g) => g,
|
||||
_ => Guid.Empty
|
||||
};
|
||||
|
||||
public ObjectId AsObjectId => RawValue switch
|
||||
{
|
||||
ObjectId o => o,
|
||||
string s => ObjectId.Parse(s),
|
||||
_ => ObjectId.Empty
|
||||
};
|
||||
|
||||
public byte[]? AsByteArray => RawValue as byte[];
|
||||
|
||||
public DateTimeOffset AsDateTimeOffset => RawValue switch
|
||||
{
|
||||
DateTimeOffset dto => dto.ToUniversalTime(),
|
||||
DateTime dt => DateTime.SpecifyKind(dt, DateTimeKind.Utc),
|
||||
string s when DateTimeOffset.TryParse(s, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal, out var dto) => dto.ToUniversalTime(),
|
||||
_ => DateTimeOffset.MinValue
|
||||
};
|
||||
|
||||
public DateTime ToUniversalTime() => RawValue switch
|
||||
{
|
||||
DateTimeOffset dto => dto.UtcDateTime,
|
||||
DateTime dt => dt.Kind == DateTimeKind.Utc ? dt : dt.ToUniversalTime(),
|
||||
string s when DateTimeOffset.TryParse(s, out var parsed) => parsed.UtcDateTime,
|
||||
_ => DateTime.MinValue
|
||||
DateTimeOffset dto => dto.UtcDateTime,
|
||||
string s when DateTimeOffset.TryParse(s, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal, out var dto) => dto.UtcDateTime,
|
||||
_ => DateTime.SpecifyKind(DateTime.MinValue, DateTimeKind.Utc)
|
||||
};
|
||||
public override string ToString() => _value?.ToString() ?? string.Empty;
|
||||
|
||||
public BsonDocument AsBsonDocument => RawValue as BsonDocument ?? (this as BsonDocument ?? new BsonDocument());
|
||||
public BsonArray AsBsonArray => RawValue as BsonArray ?? (this as BsonArray ?? new BsonArray());
|
||||
|
||||
public override string ToString() => AsString;
|
||||
|
||||
internal virtual BsonValue Clone() => new BsonValue(RawValue);
|
||||
|
||||
public bool Equals(BsonValue? other) => other is not null && Equals(RawValue, other.RawValue);
|
||||
public override bool Equals(object? obj) => obj is BsonValue other && Equals(other);
|
||||
public override int GetHashCode() => RawValue?.GetHashCode() ?? 0;
|
||||
public static bool operator ==(BsonValue? left, string? right) => string.Equals(left?.AsString, right, StringComparison.Ordinal);
|
||||
public static bool operator !=(BsonValue? left, string? right) => !(left == right);
|
||||
public static bool operator ==(string? left, BsonValue? right) => right == left;
|
||||
public static bool operator !=(string? left, BsonValue? right) => !(left == right);
|
||||
|
||||
public static BsonValue Create(object? value) => BsonDocument.ToBsonValue(value);
|
||||
|
||||
public static implicit operator BsonValue(string value) => new(value);
|
||||
public static implicit operator BsonValue(Guid value) => new(value);
|
||||
public static implicit operator BsonValue(int value) => new(value);
|
||||
public static implicit operator BsonValue(long value) => new(value);
|
||||
public static implicit operator BsonValue(bool value) => new(value);
|
||||
public static implicit operator BsonValue(double value) => new(value);
|
||||
public static implicit operator BsonValue(DateTimeOffset value) => new(value);
|
||||
public static implicit operator BsonValue(DateTime value) => new(value);
|
||||
public static implicit operator BsonValue(byte[] value) => new(value);
|
||||
}
|
||||
|
||||
public class BsonString : BsonValue { public BsonString(string value) : base(value) { } }
|
||||
public class BsonBoolean : BsonValue { public BsonBoolean(bool value) : base(value) { } }
|
||||
public class BsonInt32 : BsonValue { public BsonInt32(int value) : base(value) { } }
|
||||
public class BsonInt64 : BsonValue { public BsonInt64(long value) : base(value) { } }
|
||||
public class BsonDouble : BsonValue { public BsonDouble(double value) : base(value) { } }
|
||||
public class BsonDateTime : BsonValue { public BsonDateTime(DateTime value) : base(value) { } }
|
||||
public class BsonNull : BsonValue
|
||||
public sealed class BsonNull : BsonValue
|
||||
{
|
||||
private BsonNull() : base(null) { }
|
||||
public static BsonNull Value { get; } = new();
|
||||
|
||||
private BsonNull()
|
||||
: base(null)
|
||||
{
|
||||
}
|
||||
|
||||
public override BsonType BsonType => BsonType.Null;
|
||||
}
|
||||
|
||||
// Thin wrapper marking a string-valued BsonValue; behavior comes entirely
// from the BsonValue base (the raw value is the string itself).
public sealed class BsonString : BsonValue
{
    public BsonString(string value) : base(value)
    {
    }
}
|
||||
|
||||
// Binary payload wrapper for the Bson shim. Holds the raw bytes and offers a
// best-effort Guid conversion.
public sealed class BsonBinaryData : BsonValue
{
    public BsonBinaryData(byte[] bytes)
        : base(bytes)
    {
        // Normalize null to an empty array so Bytes is never null.
        Bytes = bytes ?? Array.Empty<byte>();
    }

    public byte[] Bytes { get; }

    // Best-effort conversion: 16 bytes are treated as a raw Guid; anything
    // else is decoded as UTF-8 text and parsed. Any failure yields Guid.Empty
    // rather than throwing.
    public Guid ToGuid()
    {
        try
        {
            if (Bytes.Length == 16)
            {
                return new Guid(Bytes);
            }

            var asString = Encoding.UTF8.GetString(Bytes);
            return Guid.TryParse(asString, out var guid) ? guid : Guid.Empty;
        }
        catch
        {
            // Deliberate swallow: conversion is advisory, never fatal.
            return Guid.Empty;
        }
    }
}
|
||||
|
||||
// Dictionary-backed document for the Bson shim. Keys compare ordinally.
// Serialization goes through System.Text.Json (ToJson / ToBson), not real BSON.
public sealed class BsonDocument : BsonValue, IDictionary<string, BsonValue>
{
    private readonly Dictionary<string, BsonValue> _values = new(StringComparer.Ordinal);

    public BsonDocument()
        : base(null)
    {
        // The document is its own raw value so base-class type checks see it.
        RawValue = this;
    }

    // Copies arbitrary CLR values, wrapping each via ToBsonValue.
    public BsonDocument(IDictionary<string, object?> values)
        : this()
    {
        foreach (var kvp in values)
        {
            _values[kvp.Key] = ToBsonValue(kvp.Value);
        }
    }

    // Copies existing pairs; null values are normalized to an empty BsonValue.
    public BsonDocument(IEnumerable<KeyValuePair<string, BsonValue>> values)
        : this()
    {
        foreach (var kvp in values)
        {
            _values[kvp.Key] = kvp.Value ?? new BsonValue();
        }
    }

    // Single-pair convenience constructors.
    public BsonDocument(string key, BsonValue value)
        : this()
    {
        Add(key, value);
    }

    public BsonDocument(string key, object? value)
        : this()
    {
        Add(key, value);
    }

    public int ElementCount => _values.Count;

    // Shadows the base indexer; setter normalizes null to an empty BsonValue.
    public new BsonValue this[string key]
    {
        get => _values[key];
        set => _values[key] = value ?? new BsonValue();
    }

    public ICollection<string> Keys => _values.Keys;
    public ICollection<BsonValue> Values => _values.Values;
    public int Count => _values.Count;
    public bool IsReadOnly => false;

    // NOTE: Add overwrites an existing key (indexer semantics), unlike
    // Dictionary.Add which would throw.
    public void Add(string key, BsonValue value) => _values[key] = value ?? new BsonValue();
    public void Add(string key, object? value) => _values[key] = ToBsonValue(value);
    public void Add(KeyValuePair<string, BsonValue> item) => Add(item.Key, item.Value);

    public void Clear() => _values.Clear();
    public bool Contains(KeyValuePair<string, BsonValue> item) => _values.Contains(item);
    public bool ContainsKey(string key) => _values.ContainsKey(key);
    public bool Contains(string key) => ContainsKey(key);
    public void CopyTo(KeyValuePair<string, BsonValue>[] array, int arrayIndex) => ((IDictionary<string, BsonValue>)_values).CopyTo(array, arrayIndex);
    public IEnumerator<KeyValuePair<string, BsonValue>> GetEnumerator() => _values.GetEnumerator();
    IEnumerator IEnumerable.GetEnumerator() => _values.GetEnumerator();
    public bool Remove(string key) => _values.Remove(key);
    public bool Remove(KeyValuePair<string, BsonValue> item) => _values.Remove(item.Key);
    public bool TryGetValue(string key, out BsonValue value) => _values.TryGetValue(key, out value!);

    // GetValue(key) throws KeyNotFoundException for a missing key (dictionary indexer).
    public BsonValue GetValue(string key) => _values[key];

    public BsonValue GetValue(string key, BsonValue defaultValue)
        => _values.TryGetValue(key, out var value) ? value : defaultValue;

    public string ToJson() => ToJson(null);

    // Serializes with keys in ordinal order; values are unwrapped via
    // BsonTypeMapper (defined elsewhere in this shim) before JSON encoding.
    public string ToJson(MongoDB.Bson.IO.JsonWriterSettings? settings)
    {
        var ordered = _values
            .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal)
            .ToDictionary(static kvp => kvp.Key, static kvp => BsonTypeMapper.MapToDotNetValue(kvp.Value));
        var options = new JsonSerializerOptions { WriteIndented = settings?.Indent ?? false };
        return JsonSerializer.Serialize(ordered, options);
    }

    // NOTE(review): "ToBson" here is just UTF-8 JSON bytes, not real BSON —
    // callers hashing this output get a JSON-based digest.
    public byte[] ToBson() => Encoding.UTF8.GetBytes(ToJson());

    public IEnumerable<BsonElement> Elements => _values.Select(static kvp => new BsonElement(kvp.Key, kvp.Value ?? new BsonValue()));

    // Recursive copy: each value is cloned via BsonValue.Clone.
    public BsonDocument DeepClone()
    {
        var copy = new BsonDocument();
        foreach (var kvp in _values)
        {
            copy._values[kvp.Key] = kvp.Value?.Clone() ?? new BsonValue();
        }
        return copy;
    }

    // Parses a JSON object into a document (root must be a JSON object).
    public static BsonDocument Parse(string json)
    {
        using var doc = JsonDocument.Parse(json);
        return FromElement(doc.RootElement).AsBsonDocument;
    }

    // JSON -> shim value mapping. Numbers prefer Int64, falling back to double.
    private static BsonValue FromElement(JsonElement element)
    {
        return element.ValueKind switch
        {
            JsonValueKind.Object => FromObject(element),
            JsonValueKind.Array => FromArray(element),
            JsonValueKind.String => new BsonValue(element.GetString()),
            JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonValue(l) : new BsonValue(element.GetDouble()),
            JsonValueKind.True => new BsonValue(true),
            JsonValueKind.False => new BsonValue(false),
            JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null),
            _ => new BsonValue(element.ToString())
        };
    }

    private static BsonDocument FromObject(JsonElement element)
    {
        var doc = new BsonDocument();
        foreach (var property in element.EnumerateObject())
        {
            doc[property.Name] = FromElement(property.Value);
        }
        return doc;
    }

    private static BsonArray FromArray(JsonElement element)
    {
        var array = new BsonArray();
        foreach (var item in element.EnumerateArray())
        {
            array.Add(FromElement(item));
        }
        return array;
    }

    // Wraps an arbitrary CLR value in the appropriate shim type. Existing
    // BsonValues pass through; enumerables (except string) become BsonArray.
    public static BsonValue ToBsonValue(object? value)
    {
        return value switch
        {
            null => new BsonValue(null),
            BsonValue bson => bson,
            string s => new BsonValue(s),
            Guid g => new BsonValue(g),
            int i => new BsonValue(i),
            long l => new BsonValue(l),
            bool b => new BsonValue(b),
            double d => new BsonValue(d),
            float f => new BsonValue(f),
            decimal dec => new BsonValue((double)dec),
            DateTime dt => new BsonValue(dt),
            DateTimeOffset dto => new BsonValue(dto),
            byte[] bytes => new BsonBinaryData(bytes),
            IEnumerable enumerable when value is not string => new BsonArray(enumerable.Cast<object?>().Select(ToBsonValue)),
            _ => new BsonValue(value)
        };
    }

    internal override BsonValue Clone() => DeepClone();
}
|
||||
|
||||
// List-backed array for the Bson shim. Null items are normalized to an empty
// BsonValue on every insertion path.
public sealed class BsonArray : BsonValue, IList<BsonValue>
{
    private readonly List<BsonValue> _items = new();

    public BsonArray()
        : base(null)
    {
        // The array is its own raw value so base-class type checks see it.
        RawValue = this;
    }

    public BsonArray(IEnumerable<BsonValue> items)
        : this()
    {
        _items.AddRange(items);
    }

    // Wraps arbitrary CLR items via BsonDocument.ToBsonValue.
    public BsonArray(IEnumerable<object?> items)
        : this()
    {
        foreach (var item in items)
        {
            Add(item);
        }
    }

    // Shadows the base indexer; setter normalizes null.
    public new BsonValue this[int index]
    {
        get => _items[index];
        set => _items[index] = value ?? new BsonValue();
    }

    public int Count => _items.Count;
    public bool IsReadOnly => false;

    public void Add(BsonValue item) => _items.Add(item ?? new BsonValue());
    public void Add(object? item) => _items.Add(BsonDocument.ToBsonValue(item));
    public void AddRange(IEnumerable<object?> items)
    {
        foreach (var item in items)
        {
            Add(item);
        }
    }
    public void Clear() => _items.Clear();
    public bool Contains(BsonValue item) => _items.Contains(item);
    public void CopyTo(BsonValue[] array, int arrayIndex) => _items.CopyTo(array, arrayIndex);
    public IEnumerator<BsonValue> GetEnumerator() => _items.GetEnumerator();
    IEnumerator IEnumerable.GetEnumerator() => _items.GetEnumerator();
    public int IndexOf(BsonValue item) => _items.IndexOf(item);
    public void Insert(int index, BsonValue item) => _items.Insert(index, item ?? new BsonValue());
    public bool Remove(BsonValue item) => _items.Remove(item);
    public void RemoveAt(int index) => _items.RemoveAt(index);

    // Deep clone: each element cloned individually.
    internal override BsonValue Clone() => new BsonArray(_items.Select(i => i.Clone()));
}
|
||||
|
||||
public sealed class BsonElement
|
||||
@@ -97,152 +430,89 @@ namespace MongoDB.Bson
|
||||
public BsonElement(string name, BsonValue value)
|
||||
{
|
||||
Name = name;
|
||||
Value = value;
|
||||
Value = value ?? new BsonValue();
|
||||
}
|
||||
|
||||
public string Name { get; }
|
||||
public BsonValue Value { get; }
|
||||
}
|
||||
|
||||
public class BsonBinaryData : BsonValue
|
||||
public readonly struct ObjectId : IEquatable<ObjectId>
|
||||
{
|
||||
private readonly byte[] _bytes;
|
||||
public BsonBinaryData(byte[] bytes) : base(null) => _bytes = bytes ?? Array.Empty<byte>();
|
||||
public BsonBinaryData(Guid guid) : this(guid.ToByteArray()) { }
|
||||
public byte[] AsByteArray => _bytes;
|
||||
public Guid ToGuid() => new(_bytes);
|
||||
private readonly string _value;
|
||||
|
||||
public ObjectId(string value)
|
||||
{
|
||||
_value = value;
|
||||
}
|
||||
|
||||
public static ObjectId Empty { get; } = new(string.Empty);
|
||||
|
||||
public override string ToString() => _value;
|
||||
|
||||
public static ObjectId Parse(string value) => new(value ?? string.Empty);
|
||||
|
||||
public bool Equals(ObjectId other) => string.Equals(_value, other._value, StringComparison.Ordinal);
|
||||
public override bool Equals(object? obj) => obj is ObjectId other && Equals(other);
|
||||
public override int GetHashCode() => _value?.GetHashCode(StringComparison.Ordinal) ?? 0;
|
||||
}
|
||||
|
||||
public class BsonArray : BsonValue, IEnumerable<BsonValue>
|
||||
public static class BsonTypeMapper
|
||||
{
|
||||
private readonly List<BsonValue> _items = new();
|
||||
public BsonArray() : base(null) { }
|
||||
public BsonArray(IEnumerable<BsonValue> values) : this() => _items.AddRange(values);
|
||||
public BsonArray(IEnumerable<object?> values) : this()
|
||||
public static object? MapToDotNetValue(BsonValue value)
|
||||
{
|
||||
foreach (var value in values)
|
||||
if (value is null) return null;
|
||||
|
||||
return value.BsonType switch
|
||||
{
|
||||
_items.Add(BsonDocument.WrapExternal(value));
|
||||
}
|
||||
BsonType.Document => value.AsBsonDocument.ToDictionary(static kvp => kvp.Key, static kvp => MapToDotNetValue(kvp.Value)),
|
||||
BsonType.Array => value.AsBsonArray.Select(MapToDotNetValue).ToArray(),
|
||||
BsonType.Null => null,
|
||||
BsonType.Boolean => value.AsBoolean,
|
||||
BsonType.Int32 => value.AsInt32,
|
||||
BsonType.Int64 => value.AsInt64,
|
||||
BsonType.DateTime => value.ToUniversalTime(),
|
||||
_ => value.AsString
|
||||
};
|
||||
}
|
||||
public void Add(BsonValue value) => _items.Add(value);
|
||||
public IEnumerator<BsonValue> GetEnumerator() => _items.GetEnumerator();
|
||||
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
|
||||
public BsonValue this[int index] { get => _items[index]; set => _items[index] = value; }
|
||||
public int Count => _items.Count;
|
||||
}
|
||||
|
||||
public class BsonDocument : BsonValue, IEnumerable<KeyValuePair<string, BsonValue>>
|
||||
public static class BsonJsonExtensions
|
||||
{
|
||||
private readonly Dictionary<string, BsonValue> _values = new(StringComparer.Ordinal);
|
||||
public BsonDocument() : base(null) { }
|
||||
public BsonDocument(string key, object? value) : this() => _values[key] = Wrap(value);
|
||||
public BsonDocument(IEnumerable<KeyValuePair<string, object?>> pairs) : this()
|
||||
public static string ToJson(this IEnumerable<BsonDocument> documents, MongoDB.Bson.IO.JsonWriterSettings? settings = null)
|
||||
{
|
||||
foreach (var kvp in pairs)
|
||||
{
|
||||
_values[kvp.Key] = Wrap(kvp.Value);
|
||||
}
|
||||
var options = new JsonSerializerOptions { WriteIndented = settings?.Indent ?? false };
|
||||
var payload = documents?.Select(BsonTypeMapper.MapToDotNetValue).ToList() ?? new List<object?>();
|
||||
return JsonSerializer.Serialize(payload, options);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
namespace MongoDB.Bson.Serialization.Attributes
|
||||
{
|
||||
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Class | AttributeTargets.Struct)]
|
||||
public sealed class BsonElementAttribute : Attribute
|
||||
{
|
||||
public BsonElementAttribute(string elementName)
|
||||
{
|
||||
ElementName = elementName;
|
||||
}
|
||||
|
||||
private static BsonValue Wrap(object? value) => value switch
|
||||
{
|
||||
BsonValue v => v,
|
||||
IEnumerable<BsonValue> enumerable => new BsonArray(enumerable),
|
||||
IEnumerable<object?> objEnum => new BsonArray(objEnum.Select(Wrap)),
|
||||
_ => new BsonValue(value)
|
||||
};
|
||||
|
||||
internal static BsonValue WrapExternal(object? value) => Wrap(value);
|
||||
|
||||
public BsonValue this[string key]
|
||||
{
|
||||
get => _values[key];
|
||||
set => _values[key] = Wrap(value);
|
||||
}
|
||||
|
||||
public int ElementCount => _values.Count;
|
||||
public IEnumerable<BsonElement> Elements => _values.Select(kvp => new BsonElement(kvp.Key, kvp.Value));
|
||||
|
||||
public bool Contains(string key) => _values.ContainsKey(key);
|
||||
|
||||
public bool TryGetValue(string key, out BsonValue value) => _values.TryGetValue(key, out value!);
|
||||
|
||||
public BsonValue GetValue(string key, BsonValue? defaultValue = null)
|
||||
{
|
||||
return _values.TryGetValue(key, out var value)
|
||||
? value
|
||||
: defaultValue ?? new BsonValue(null);
|
||||
}
|
||||
|
||||
public bool Remove(string key) => _values.Remove(key);
|
||||
|
||||
public void Add(string key, BsonValue value) => _values[key] = value;
|
||||
public void Add(string key, object? value) => _values[key] = Wrap(value);
|
||||
|
||||
public IEnumerator<KeyValuePair<string, BsonValue>> GetEnumerator() => _values.GetEnumerator();
|
||||
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
|
||||
|
||||
public BsonDocument DeepClone()
|
||||
{
|
||||
var clone = new BsonDocument();
|
||||
foreach (var kvp in _values)
|
||||
{
|
||||
clone[kvp.Key] = kvp.Value;
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
|
||||
public static BsonDocument Parse(string json)
|
||||
{
|
||||
using var doc = JsonDocument.Parse(json);
|
||||
return FromElement(doc.RootElement);
|
||||
}
|
||||
|
||||
private static BsonDocument FromElement(JsonElement element)
|
||||
{
|
||||
var doc = new BsonDocument();
|
||||
foreach (var prop in element.EnumerateObject())
|
||||
{
|
||||
doc[prop.Name] = FromJsonValue(prop.Value);
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
private static BsonValue FromJsonValue(JsonElement element) => element.ValueKind switch
|
||||
{
|
||||
JsonValueKind.Object => FromElement(element),
|
||||
JsonValueKind.Array => new BsonArray(element.EnumerateArray().Select(FromJsonValue)),
|
||||
JsonValueKind.String => new BsonString(element.GetString() ?? string.Empty),
|
||||
JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonInt64(l) : new BsonDouble(element.GetDouble()),
|
||||
JsonValueKind.True => new BsonBoolean(true),
|
||||
JsonValueKind.False => new BsonBoolean(false),
|
||||
JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null),
|
||||
_ => new BsonValue(null)
|
||||
};
|
||||
|
||||
public string ToJson(MongoDB.Bson.IO.JsonWriterSettings? settings = null)
|
||||
{
|
||||
var dict = _values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value));
|
||||
return JsonSerializer.Serialize(dict, new JsonSerializerOptions(JsonSerializerDefaults.Web));
|
||||
}
|
||||
|
||||
public byte[] ToBson() => Encoding.UTF8.GetBytes(ToJson());
|
||||
|
||||
private static object? Unwrap(BsonValue value) => value switch
|
||||
{
|
||||
BsonDocument doc => doc._values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value)),
|
||||
BsonArray array => array.Select(Unwrap).ToArray(),
|
||||
_ => value.RawValue
|
||||
};
|
||||
public string ElementName { get; }
|
||||
}
|
||||
}
|
||||
|
||||
namespace MongoDB.Bson.IO
|
||||
{
|
||||
public enum JsonOutputMode { Strict, RelaxedExtendedJson }
|
||||
public class JsonWriterSettings
|
||||
public enum JsonOutputMode
|
||||
{
|
||||
Strict,
|
||||
RelaxedExtendedJson
|
||||
}
|
||||
|
||||
public sealed class JsonWriterSettings
|
||||
{
|
||||
public bool Indent { get; set; }
|
||||
public JsonOutputMode OutputMode { get; set; } = JsonOutputMode.Strict;
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user