deploy/telemetry/alerts/export-center-alerts.yaml (new file, 164 lines)
@@ -0,0 +1,164 @@
# ExportCenter Alert Rules
# SLO Burn-rate alerts for export service reliability

groups:
  - name: export-center-slo
    interval: 30s
    rules:
      # SLO: 99.5% success rate target
      # Error budget: 0.5% (432 errors per day at 86400 requests/day)

      # Fast burn - 2% budget consumption in 1 hour (critical)
      - alert: ExportCenterHighErrorBurnRate
        expr: |
          (
            sum(rate(export_runs_failed_total[1h]))
            /
            sum(rate(export_runs_total[1h]))
          ) > (14.4 * 0.005)
        for: 2m
        labels:
          severity: critical
          service: export-center
          slo: availability
        annotations:
          summary: "ExportCenter high error burn rate"
          description: "Error rate is {{ $value | humanizePercentage }} over the last hour, consuming error budget at 14.4x the sustainable rate."
          runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-error-rate"

      # Slow burn - 5% budget consumption in 6 hours (warning)
      - alert: ExportCenterElevatedErrorBurnRate
        expr: |
          (
            sum(rate(export_runs_failed_total[6h]))
            /
            sum(rate(export_runs_total[6h]))
          ) > (6 * 0.005)
        for: 5m
        labels:
          severity: warning
          service: export-center
          slo: availability
        annotations:
          summary: "ExportCenter elevated error burn rate"
          description: "Error rate is {{ $value | humanizePercentage }} over the last 6 hours, consuming error budget at 6x the sustainable rate."
          runbook_url: "https://docs.stellaops.io/runbooks/export-center/elevated-error-rate"

  - name: export-center-latency
    interval: 30s
    rules:
      # SLO: 95% of exports complete within 120s
      # Fast burn - p95 latency exceeding threshold
      - alert: ExportCenterHighLatency
        expr: |
          histogram_quantile(0.95,
            sum(rate(export_run_duration_seconds_bucket[5m])) by (le)
          ) > 120
        for: 5m
        labels:
          severity: warning
          service: export-center
          slo: latency
        annotations:
          summary: "ExportCenter high latency"
          description: "95th percentile export duration is {{ $value | humanizeDuration }}, exceeding the 120s SLO target."
          runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-latency"

      # Critical latency - p99 exceeding 5 minutes
      - alert: ExportCenterCriticalLatency
        expr: |
          histogram_quantile(0.99,
            sum(rate(export_run_duration_seconds_bucket[5m])) by (le)
          ) > 300
        for: 2m
        labels:
          severity: critical
          service: export-center
          slo: latency
        annotations:
          summary: "ExportCenter critical latency"
          description: "99th percentile export duration is {{ $value | humanizeDuration }}, indicating severe performance degradation."
          runbook_url: "https://docs.stellaops.io/runbooks/export-center/critical-latency"

  - name: export-center-capacity
    interval: 60s
    rules:
      # Queue buildup warning
      - alert: ExportCenterHighConcurrency
        expr: sum(export_runs_in_progress) > 50
        for: 5m
        labels:
          severity: warning
          service: export-center
        annotations:
          summary: "ExportCenter high concurrency"
          description: "{{ $value }} exports currently in progress. Consider scaling or investigating slow exports."
          runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-concurrency"

      # Stuck exports - exports running longer than 30 minutes
      - alert: ExportCenterStuckExports
        expr: |
          histogram_quantile(0.99,
            sum(rate(export_run_duration_seconds_bucket{status!="completed"}[1h])) by (le)
          ) > 1800
        for: 10m
        labels:
          severity: warning
          service: export-center
        annotations:
          summary: "ExportCenter potentially stuck exports"
          description: "Some exports may be stuck - 99th percentile duration for incomplete exports exceeds 30 minutes."
          runbook_url: "https://docs.stellaops.io/runbooks/export-center/stuck-exports"

  - name: export-center-errors
    interval: 30s
    rules:
      # Specific error code spike
      - alert: ExportCenterErrorCodeSpike
        expr: |
          sum by (error_code) (
            rate(export_runs_failed_total[5m])
          ) > 0.1
        for: 5m
        labels:
          severity: warning
          service: export-center
        annotations:
          summary: "ExportCenter error code spike: {{ $labels.error_code }}"
          description: "Error code {{ $labels.error_code }} is occurring at {{ $value | humanize }}/s."
          runbook_url: "https://docs.stellaops.io/runbooks/export-center/error-codes"

      # No successful exports in 15 minutes (when there is traffic)
      - alert: ExportCenterNoSuccessfulExports
        expr: |
          (
            sum(rate(export_runs_total[15m])) > 0
          )
          and
          (
            sum(rate(export_runs_success_total[15m])) == 0
          )
        for: 10m
        labels:
          severity: critical
          service: export-center
        annotations:
          summary: "ExportCenter no successful exports"
          description: "No exports have completed successfully in the last 15 minutes despite ongoing attempts."
          runbook_url: "https://docs.stellaops.io/runbooks/export-center/no-successful-exports"

  - name: export-center-deprecation
    interval: 5m
    rules:
      # Deprecated endpoint usage
      - alert: ExportCenterDeprecatedEndpointUsage
        expr: |
          sum(rate(export_center_deprecated_endpoint_access_total[1h])) > 0
        for: 1h
        labels:
          severity: info
          service: export-center
        annotations:
          summary: "Deprecated export endpoints still in use"
          description: "Legacy /exports endpoints are still being accessed at {{ $value | humanize }}/s. Migration to the v1 API is recommended."
          runbook_url: "https://docs.stellaops.io/api/export-center/migration"
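
The burn-rate multipliers above follow the standard multi-window SLO convention: a burn rate of B consumes B × window ÷ budget period of the error budget, so over a 30-day period the fast window spends 14.4 × 1h ÷ 720h = 2% of budget and the slow window 6 × 6h ÷ 720h = 5%. A minimal `promtool test rules` sketch for the fast-burn rule; the test file name and sample series values are illustrative assumptions, not part of the commit:

```yaml
# export-center-alerts_test.yaml (hypothetical) -- run with:
#   promtool test rules export-center-alerts_test.yaml
rule_files:
  - export-center-alerts.yaml
evaluation_interval: 30s
tests:
  - interval: 1m
    input_series:
      # 100 runs/min with 10 failures/min => 10% error ratio,
      # above the 14.4 * 0.5% = 7.2% fast-burn threshold
      - series: 'export_runs_total'
        values: '0+100x120'
      - series: 'export_runs_failed_total'
        values: '0+10x120'
    alert_rule_test:
      - eval_time: 1h
        alertname: ExportCenterHighErrorBurnRate
        exp_alerts:
          - exp_labels:
              severity: critical
              service: export-center
              slo: availability
```

Because both sides of the ratio are wrapped in `sum()`, the fired alert carries only the rule's own labels, which is what `exp_labels` asserts.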
deploy/telemetry/dashboards/export-center.json (new file, 638 lines)
@@ -0,0 +1,638 @@
{
  "annotations": {
    "list": [
      {
        "builtIn": 1,
        "datasource": { "type": "grafana", "uid": "-- Grafana --" },
        "enable": true,
        "hide": true,
        "iconColor": "rgba(0, 211, 255, 1)",
        "name": "Annotations & Alerts",
        "type": "dashboard"
      }
    ]
  },
  "description": "ExportCenter service observability dashboard",
  "editable": true,
  "fiscalYearStartMonth": 0,
  "graphTooltip": 0,
  "id": null,
  "links": [],
  "liveNow": false,
  "panels": [
    {
      "collapsed": false,
      "gridPos": { "h": 1, "w": 24, "x": 0, "y": 0 },
      "id": 1,
      "panels": [],
      "title": "Export Runs Overview",
      "type": "row"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "thresholds" },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null }
            ]
          },
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": { "h": 4, "w": 4, "x": 0, "y": 1 },
      "id": 2,
      "options": {
        "colorMode": "value",
        "graphMode": "area",
        "justifyMode": "auto",
        "orientation": "auto",
        "reduceOptions": {
          "calcs": ["lastNotNull"],
          "fields": "",
          "values": false
        },
        "textMode": "auto"
      },
      "pluginVersion": "10.0.0",
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "sum(increase(export_runs_total{tenant=~\"$tenant\"}[$__range]))",
          "legendFormat": "Total Runs",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Total Export Runs",
      "type": "stat"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "thresholds" },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null }
            ]
          },
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": { "h": 4, "w": 4, "x": 4, "y": 1 },
      "id": 3,
      "options": {
        "colorMode": "value",
        "graphMode": "area",
        "justifyMode": "auto",
        "orientation": "auto",
        "reduceOptions": {
          "calcs": ["lastNotNull"],
          "fields": "",
          "values": false
        },
        "textMode": "auto"
      },
      "pluginVersion": "10.0.0",
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "sum(increase(export_runs_success_total{tenant=~\"$tenant\"}[$__range]))",
          "legendFormat": "Successful",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Successful Runs",
      "type": "stat"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "thresholds" },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 1 },
              { "color": "red", "value": 5 }
            ]
          },
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": { "h": 4, "w": 4, "x": 8, "y": 1 },
      "id": 4,
      "options": {
        "colorMode": "value",
        "graphMode": "area",
        "justifyMode": "auto",
        "orientation": "auto",
        "reduceOptions": {
          "calcs": ["lastNotNull"],
          "fields": "",
          "values": false
        },
        "textMode": "auto"
      },
      "pluginVersion": "10.0.0",
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "sum(increase(export_runs_failed_total{tenant=~\"$tenant\"}[$__range]))",
          "legendFormat": "Failed",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Failed Runs",
      "type": "stat"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "thresholds" },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "red", "value": null },
              { "color": "yellow", "value": 95 },
              { "color": "green", "value": 99 }
            ]
          },
          "unit": "percent"
        },
        "overrides": []
      },
      "gridPos": { "h": 4, "w": 4, "x": 12, "y": 1 },
      "id": 5,
      "options": {
        "colorMode": "value",
        "graphMode": "area",
        "justifyMode": "auto",
        "orientation": "auto",
        "reduceOptions": {
          "calcs": ["lastNotNull"],
          "fields": "",
          "values": false
        },
        "textMode": "auto"
      },
      "pluginVersion": "10.0.0",
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "100 * sum(increase(export_runs_success_total{tenant=~\"$tenant\"}[$__range])) / sum(increase(export_runs_total{tenant=~\"$tenant\"}[$__range]))",
          "legendFormat": "Success Rate",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Success Rate",
      "type": "stat"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "thresholds" },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null }
            ]
          },
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": { "h": 4, "w": 4, "x": 16, "y": 1 },
      "id": 6,
      "options": {
        "colorMode": "value",
        "graphMode": "area",
        "justifyMode": "auto",
        "orientation": "auto",
        "reduceOptions": {
          "calcs": ["lastNotNull"],
          "fields": "",
          "values": false
        },
        "textMode": "auto"
      },
      "pluginVersion": "10.0.0",
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "sum(export_runs_in_progress{tenant=~\"$tenant\"})",
          "legendFormat": "In Progress",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Runs In Progress",
      "type": "stat"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "palette-classic" },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "line",
            "fillOpacity": 10,
            "gradientMode": "none",
            "hideFrom": { "legend": false, "tooltip": false, "viz": false },
            "lineInterpolation": "linear",
            "lineWidth": 1,
            "pointSize": 5,
            "scaleDistribution": { "type": "linear" },
            "showPoints": "auto",
            "spanNulls": false,
            "stacking": { "group": "A", "mode": "none" },
            "thresholdsStyle": { "mode": "off" }
          },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [{ "color": "green", "value": null }]
          },
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 5 },
      "id": 7,
      "options": {
        "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
        "tooltip": { "mode": "multi", "sort": "desc" }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "sum by (export_type) (rate(export_runs_total{tenant=~\"$tenant\"}[5m]))",
          "legendFormat": "{{export_type}}",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Export Runs by Type (rate/5m)",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "palette-classic" },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "line",
            "fillOpacity": 10,
            "gradientMode": "none",
            "hideFrom": { "legend": false, "tooltip": false, "viz": false },
            "lineInterpolation": "linear",
            "lineWidth": 1,
            "pointSize": 5,
            "scaleDistribution": { "type": "linear" },
            "showPoints": "auto",
            "spanNulls": false,
            "stacking": { "group": "A", "mode": "none" },
            "thresholdsStyle": { "mode": "off" }
          },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [{ "color": "green", "value": null }]
          },
          "unit": "s"
        },
        "overrides": []
      },
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 5 },
      "id": 8,
      "options": {
        "legend": { "calcs": ["mean", "max", "p95"], "displayMode": "table", "placement": "bottom", "showLegend": true },
        "tooltip": { "mode": "multi", "sort": "desc" }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "histogram_quantile(0.50, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
          "legendFormat": "p50",
          "range": true,
          "refId": "A"
        },
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "histogram_quantile(0.95, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
          "legendFormat": "p95",
          "range": true,
          "refId": "B"
        },
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "histogram_quantile(0.99, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
          "legendFormat": "p99",
          "range": true,
          "refId": "C"
        }
      ],
      "title": "Export Run Duration (latency percentiles)",
      "type": "timeseries"
    },
    {
      "collapsed": false,
      "gridPos": { "h": 1, "w": 24, "x": 0, "y": 13 },
      "id": 9,
      "panels": [],
      "title": "Artifacts & Bundle Sizes",
      "type": "row"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "palette-classic" },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "bars",
            "fillOpacity": 50,
            "gradientMode": "none",
            "hideFrom": { "legend": false, "tooltip": false, "viz": false },
            "lineInterpolation": "linear",
            "lineWidth": 1,
            "pointSize": 5,
            "scaleDistribution": { "type": "linear" },
            "showPoints": "never",
            "spanNulls": false,
            "stacking": { "group": "A", "mode": "normal" },
            "thresholdsStyle": { "mode": "off" }
          },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [{ "color": "green", "value": null }]
          },
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 14 },
      "id": 10,
      "options": {
        "legend": { "calcs": ["sum"], "displayMode": "table", "placement": "bottom", "showLegend": true },
        "tooltip": { "mode": "multi", "sort": "desc" }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "sum by (artifact_type) (increase(export_artifacts_total{tenant=~\"$tenant\"}[1h]))",
          "legendFormat": "{{artifact_type}}",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Artifacts Exported by Type (per hour)",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "palette-classic" },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "line",
            "fillOpacity": 10,
            "gradientMode": "none",
            "hideFrom": { "legend": false, "tooltip": false, "viz": false },
            "lineInterpolation": "linear",
            "lineWidth": 1,
            "pointSize": 5,
            "scaleDistribution": { "type": "linear" },
            "showPoints": "auto",
            "spanNulls": false,
            "stacking": { "group": "A", "mode": "none" },
            "thresholdsStyle": { "mode": "off" }
          },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [{ "color": "green", "value": null }]
          },
          "unit": "bytes"
        },
        "overrides": []
      },
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 14 },
      "id": 11,
      "options": {
        "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
        "tooltip": { "mode": "multi", "sort": "desc" }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "histogram_quantile(0.50, sum by (le, export_type) (rate(export_bundle_size_bytes_bucket{tenant=~\"$tenant\"}[5m])))",
          "legendFormat": "{{export_type}} p50",
          "range": true,
          "refId": "A"
        },
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "histogram_quantile(0.95, sum by (le, export_type) (rate(export_bundle_size_bytes_bucket{tenant=~\"$tenant\"}[5m])))",
          "legendFormat": "{{export_type}} p95",
          "range": true,
          "refId": "B"
        }
      ],
      "title": "Bundle Size Distribution by Type",
      "type": "timeseries"
    },
    {
      "collapsed": false,
      "gridPos": { "h": 1, "w": 24, "x": 0, "y": 22 },
      "id": 12,
      "panels": [],
      "title": "Error Analysis",
      "type": "row"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "palette-classic" },
          "custom": {
            "hideFrom": { "legend": false, "tooltip": false, "viz": false }
          },
          "mappings": [],
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": { "h": 8, "w": 8, "x": 0, "y": 23 },
      "id": 13,
      "options": {
        "legend": { "displayMode": "table", "placement": "right", "showLegend": true },
        "pieType": "pie",
        "reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false },
        "tooltip": { "mode": "single", "sort": "none" }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "sum by (error_code) (increase(export_runs_failed_total{tenant=~\"$tenant\"}[$__range]))",
          "legendFormat": "{{error_code}}",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Failures by Error Code",
      "type": "piechart"
    },
    {
      "datasource": { "type": "prometheus", "uid": "${datasource}" },
      "fieldConfig": {
        "defaults": {
          "color": { "mode": "palette-classic" },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "line",
            "fillOpacity": 0,
            "gradientMode": "none",
            "hideFrom": { "legend": false, "tooltip": false, "viz": false },
            "lineInterpolation": "linear",
            "lineWidth": 2,
            "pointSize": 5,
            "scaleDistribution": { "type": "linear" },
            "showPoints": "never",
            "spanNulls": false,
            "stacking": { "group": "A", "mode": "none" },
            "thresholdsStyle": { "mode": "line" }
          },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "red", "value": 0.01 }
            ]
          },
          "unit": "percentunit"
        },
        "overrides": []
      },
      "gridPos": { "h": 8, "w": 16, "x": 8, "y": 23 },
      "id": 14,
      "options": {
        "legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
        "tooltip": { "mode": "multi", "sort": "desc" }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "${datasource}" },
          "editorMode": "code",
          "expr": "sum(rate(export_runs_failed_total{tenant=~\"$tenant\"}[5m])) / sum(rate(export_runs_total{tenant=~\"$tenant\"}[5m]))",
          "legendFormat": "Error Rate",
          "range": true,
          "refId": "A"
        }
      ],
      "title": "Error Rate (5m window)",
      "type": "timeseries"
    }
  ],
  "refresh": "30s",
  "schemaVersion": 38,
  "style": "dark",
  "tags": ["export-center", "stellaops"],
  "templating": {
    "list": [
      {
        "current": {},
        "hide": 0,
        "includeAll": false,
        "multi": false,
        "name": "datasource",
        "options": [],
        "query": "prometheus",
        "refresh": 1,
        "regex": "",
        "skipUrlSync": false,
        "type": "datasource"
      },
      {
        "allValue": ".*",
        "current": {},
        "datasource": { "type": "prometheus", "uid": "${datasource}" },
        "definition": "label_values(export_runs_total, tenant)",
        "hide": 0,
        "includeAll": true,
        "multi": true,
        "name": "tenant",
        "options": [],
        "query": { "query": "label_values(export_runs_total, tenant)", "refId": "StandardVariableQuery" },
        "refresh": 2,
        "regex": "",
        "skipUrlSync": false,
        "sort": 1,
        "type": "query"
      }
    ]
  },
  "time": { "from": "now-6h", "to": "now" },
  "timepicker": {},
  "timezone": "utc",
  "title": "ExportCenter Service",
  "uid": "export-center-overview",
  "version": 1,
  "weekStart": ""
}
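
One way to ship the dashboard above is Grafana's file-based provisioning; the provider name and directory below are assumptions for illustration, not paths defined by this commit:

```yaml
# Dashboard provisioning config (hypothetical location:
# /etc/grafana/provisioning/dashboards/stellaops.yaml)
apiVersion: 1
providers:
  - name: stellaops-telemetry        # illustrative provider name
    orgId: 1
    type: file
    disableDeletion: false
    updateIntervalSeconds: 60
    options:
      # assumed directory containing export-center.json
      path: /var/lib/grafana/dashboards/stellaops
```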
@@ -47,13 +47,15 @@
 | 13 | CONCELIER-LNM-21-201 | **DONE** (2025-12-06) | Endpoint implemented in Program.cs. Build blocked by pre-existing errors in Merge/Storage.Postgres/Connector.Common modules. | Concelier WebService Guild · BE-Base Platform Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/observations` filters by alias/purl/source with strict tenant scopes; echoes upstream values + provenance fields only. |
 | 14 | CONCELIER-LNM-21-202 | **DONE** (2025-12-06) | Endpoints implemented: `/advisories/linksets` (paginated), `/advisories/linksets/export` (evidence bundles). No synthesis/merge - echoes upstream values only. | Concelier WebService Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/linksets`/`export`/`evidence` endpoints surface correlation + conflict payloads and `ERR_AGG_*` mapping; no synthesis/merge. |
 | 15 | CONCELIER-LNM-21-203 | **DONE** (2025-12-06) | Implemented `/internal/events/observations/publish` and `/internal/events/linksets/publish` POST endpoints. Uses existing event infrastructure (AdvisoryObservationUpdatedEvent, AdvisoryLinksetUpdatedEvent). | Concelier WebService Guild · Platform Events Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Publish idempotent NATS/Redis events for new observations/linksets with documented schemas; include tenant + provenance references only. |
-| 16 | CONCELIER-AIRGAP-56-001..58-001 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-ART-56-001; PREP-EVIDENCE-BDL-01 | Concelier Core · AirGap Guilds | Mirror/offline provenance chain for Concelier advisory evidence; proceed against frozen contracts once mirror bundle automation lands. |
-| 17 | CONCELIER-CONSOLE-23-001..003 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-CONSOLE-FIXTURES-29; PREP-EVIDENCE-BDL-01 | Concelier Console Guild | Console advisory aggregation/search helpers; consume frozen schema and evidence bundle once upstream artefacts delivered. |
+| 16 | CONCELIER-AIRGAP-56-001..58-001 | DONE (2025-12-07) | PREP-ART-56-001; PREP-EVIDENCE-BDL-01 completed (see SPRINT_0110); artifacts reused. | Concelier Core · AirGap Guilds | Mirror/offline provenance chain for Concelier advisory evidence; deterministic NDJSON bundle builder + manifest/entry-trace validator and sealed-mode deploy runbook at `docs/runbooks/concelier-airgap-bundle-deploy.md` with sample bundle `out/mirror/thin/mirror-thin-m0-sample.tar.gz`. |
+| 17 | CONCELIER-CONSOLE-23-001..003 | DONE (2025-12-07) | PREP-CONSOLE-FIXTURES-29; PREP-EVIDENCE-BDL-01 completed (see SPRINT_0110); artifacts reused. | Concelier Console Guild | Console advisory aggregation/search helpers wired to LNM schema; consumption contract `docs/modules/concelier/operations/console-lnm-consumption.md`, fixtures in `docs/samples/console/`, hashes under `out/console/guardrails/`. |
 | 18 | FEEDCONN-ICSCISA-02-012 / KISA-02-008 | BLOCKED (moved from SPRINT_0110 on 2025-11-23) | PREP-FEEDCONN-ICS-KISA-PLAN | Concelier Feed Owners | Remediation refreshes for ICSCISA/KISA feeds; publish provenance + cadence. |

 ## Execution Log
 | Date (UTC) | Update | Owner |
 | --- | --- | --- |
+| 2025-12-07 | Marked CONCELIER-AIRGAP-56-001..58-001 DONE (artifacts from SPRINT_0110: `docs/runbooks/concelier-airgap-bundle-deploy.md`, `out/mirror/thin/mirror-thin-m0-sample.tar.gz`). | Project Mgmt |
+| 2025-12-07 | Marked CONCELIER-CONSOLE-23-001..003 DONE (artifacts from SPRINT_0110: `docs/modules/concelier/operations/console-lnm-consumption.md`, `docs/samples/console/`, `out/console/guardrails/`). | Project Mgmt |
 | 2025-12-06 | **CONCELIER-LNM-21-203 DONE:** Implemented `/internal/events/observations/publish` and `/internal/events/linksets/publish` POST endpoints in Program.cs. Added `ObservationEventPublishRequest` and `LinksetEventPublishRequest` contracts. Uses existing `IAdvisoryObservationEventPublisher` and `IAdvisoryLinksetEventPublisher` interfaces. Wave B now complete (tasks 12-15 all done). | Implementer |
 | 2025-12-06 | **CONCELIER-LNM-21-202 DONE:** Implemented `/advisories/linksets` GET endpoint (paginated, supports advisoryId/alias/source filters). Implemented `/advisories/linksets/export` GET endpoint (evidence bundles with full provenance). Maps linksets to LnmLinksetResponse format with conflicts and normalized data. | Implementer |
 | 2025-12-06 | **CONCELIER-LNM-21-201 DONE:** Implemented `/advisories/observations` GET endpoint in Program.cs. Supports alias/purl/cpe/id filtering with pagination (cursor/limit). Enforces tenant scopes via `X-Stella-Tenant` header. Returns observations with linkset aggregate (aliases, purls, cpes, references, scopes, relationships, confidence, conflicts). Uses `ObservationsPolicyName` authorization. Build blocked by pre-existing errors in Merge/Storage.Postgres/Connector.Common. | Implementer |
@@ -75,6 +77,7 @@
 | 2025-11-23 | Captured build binlog for stalled WebService.Tests attempt at `out/ws-tests.binlog` for CI triage. | Concelier Core |
 | 2025-11-23 | Split CI runner blocker into DEVOPS-CONCELIER-CI-24-101 (SPRINT_0503_0001_0001_ops_devops_i); all CI/vstest-related blocks now point to that ops task. | Project Mgmt |
 | 2025-11-23 | Marked downstream tasks (GRAPH-24-101/28-102, LNM-21-004..203) BLOCKED pending CI/clean runner; local harness cannot compile or run tests (`invalid test source` / hang). Development awaiting CI resources. Split storage/backfill/object-store tasks into DEV (here) vs DEVOPS release items (10b/11b/12b) to avoid dev blockage. | Project Mgmt |
+| 2025-12-07 | PREP-ART-56-001 / PREP-EVIDENCE-BDL-01 / PREP-CONSOLE-FIXTURES-29 confirmed DONE in archived Sprint 0110; moved CONCELIER-AIRGAP-56-001..58-001 and CONCELIER-CONSOLE-23-001..003 to TODO. | Project Mgmt |
 | 2025-11-23 | Imported CONCELIER-AIRGAP-56-001..58-001, CONCELIER-CONSOLE-23-001..003, FEEDCONN-ICSCISA-02-012/KISA-02-008 from SPRINT_0110; statuses remain BLOCKED pending mirror/console/feed artefacts. | Project Mgmt |
 | 2025-11-20 | Wired optional NATS transport for `advisory.observation.updated@1`; background worker dequeues Mongo outbox and publishes to configured stream/subject. | Implementer |
 | 2025-11-20 | Wired advisory.observation.updated@1 publisher/storage path and aligned linkset confidence/conflict logic to LNM-21-002 weights (code + migrations). | Implementer |

@@ -30,7 +30,7 @@
 | 3 | MIRROR-CRT-57-001 | DONE (2025-11-23) | OCI layout/manifest emitted via `make-thin-v1.sh` when `OCI=1`; layer points to thin bundle tarball. | Mirror Creator · DevOps Guild | Add optional OCI archive generation with digest recording. |
 | 4 | MIRROR-CRT-57-002 | DONE (2025-12-03) | Time anchor DSSE signing added (opt-in via SIGN_KEY) with bundle meta hash + verifier checks; accepts `TIME_ANCHOR_FILE` fallback fixture. | Mirror Creator · AirGap Time Guild | Embed signed time-anchor metadata. |
 | 5 | MIRROR-CRT-58-001 | DONE (2025-12-03) | Test-signed thin v1 bundle + CLI wrappers ready; production signing still waits on MIRROR-CRT-56-002 key. | Mirror Creator · CLI Guild | Deliver `stella mirror create|verify` verbs with delta + verification flows. |
-| 6 | MIRROR-CRT-58-002 | PARTIAL (dev-only) | Test-signed bundle available; production signing blocked on MIRROR-CRT-56-002. | Mirror Creator · Exporter Guild | Integrate Export Center scheduling + audit logs. |
+| 6 | MIRROR-CRT-58-002 | DOING (dev) | Production signing still blocked on MIRROR-CRT-56-002; dev scheduling script added. | Mirror Creator · Exporter Guild | Integrate Export Center scheduling + audit logs. |
 | 7 | EXPORT-OBS-51-001 / 54-001 | PARTIAL (dev-only) | DSSE/TUF profile + test-signed bundle available; production signing awaits MIRROR_SIGN_KEY_B64. | Exporter Guild | Align Export Center workers with assembler output. |
 | 8 | AIRGAP-TIME-57-001 | DONE (2025-12-06) | Real Ed25519 Roughtime + RFC3161 SignedCms verification; TimeAnchorPolicyService added | AirGap Time Guild | Provide trusted time-anchor service & policy. |
 | 9 | CLI-AIRGAP-56-001 | DONE (2025-12-06) | MirrorBundleImportService created with DSSE/Merkle verification; airgap import handler updated to use real import flow with catalog registration | CLI Guild | Extend CLI offline kit tooling to consume mirror bundles. |
@@ -42,6 +42,7 @@
 ## Execution Log
 | Date (UTC) | Update | Owner |
 | --- | --- | --- |
+| 2025-12-07 | Added Export Center scheduling helper `schedule-export-center-run.sh` (env-driven POST + audit log) to advance MIRROR-CRT-58-002; still using dev signing until MIRROR-CRT-56-002 production key is available. | Implementer |
 | 2025-12-06 | CLI-AIRGAP-56-001 DONE: Extended CLI offline kit to consume mirror bundles. Created MirrorBundleImportService with DSSE/TUF/Merkle verification using AirGap.Importer module integration. Updated HandleAirgapImportAsync to use real import flow with IBundleCatalogRepository registration, DSSE signature verification display, and imported file tracking. Added project reference to StellaOps.AirGap.Importer, registered services in Program.cs. Build verified for AirGap modules (CLI blocked by pre-existing MongoDB type conflicts in Concelier.Storage.Postgres dependency). | Implementer |
 | 2025-12-06 | AIRGAP-TIME-57-001 DONE: Implemented real Ed25519 Roughtime verification (RoughtimeVerifier with wire format parsing, signature verification against trust roots) and RFC3161 SignedCms verification (Rfc3161Verifier with ASN.1 parsing, TSTInfo extraction, X509 chain validation). Created TimeAnchorPolicyService for policy enforcement (bundle import validation, drift detection, strict operation enforcement). Updated tests for both verifiers. Build verified (0 errors, 0 warnings). | Implementer |
 | 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |

@@ -32,7 +32,7 @@
 | P4 | PREP-EVIDENCE-LOCKER-GUILD-BLOCKED-SCHEMAS-NO | DONE (2025-11-20) | Prep note at `docs/modules/evidence-locker/prep/2025-11-20-schema-readiness-blockers.md`; awaiting AdvisoryAI/Orch envelopes. | Planning | BLOCKED (schemas not yet delivered). <br><br> Document artefact/deliverable for Evidence Locker Guild and publish location so downstream tasks can proceed. |
 | P5 | PREP-EVIDENCE-LOCKER-GUILD-REPLAY-DELIVERY-GU | DONE (2025-11-20) | Prep note at `docs/modules/evidence-locker/prep/2025-11-20-replay-delivery-sync.md`; waiting on ledger retention defaults. | Planning | BLOCKED (awaiting schema signals). <br><br> Document artefact/deliverable for Evidence Locker Guild · Replay Delivery Guild and publish location so downstream tasks can proceed. |
 | 0 | ADV-ORCH-SCHEMA-LIB-161 | DONE | Shared models published with draft evidence bundle schema v0 and orchestrator envelopes; ready for downstream wiring. | AdvisoryAI Guild · Orchestrator/Notifications Guild · Platform Guild | Publish versioned package + fixtures to `/src/__Libraries` (or shared NuGet) so downstream components can consume frozen schema. |
-| 1 | EVID-OBS-54-002 | TODO | Schema blockers resolved: `docs/schemas/orchestrator-envelope.schema.json` + `docs/schemas/evidence-locker-dsse.schema.json` + `docs/schemas/advisory-key.schema.json` available. Ready for DSSE finalization. | Evidence Locker Guild | Finalize deterministic bundle packaging + DSSE layout per `docs/modules/evidence-locker/bundle-packaging.md`, including portable/incident modes. |
+| 1 | EVID-OBS-54-002 | DONE | Determinism finalized: uid/gid=0, empty username/groupname, fixed timestamp; tests added. | Evidence Locker Guild | Finalize deterministic bundle packaging + DSSE layout per `docs/modules/evidence-locker/bundle-packaging.md`, including portable/incident modes. |
 | 2 | EVID-REPLAY-187-001 | BLOCKED | PREP-EVID-REPLAY-187-001-AWAIT-REPLAY-LEDGER | Evidence Locker Guild · Replay Delivery Guild | Implement replay bundle ingestion + retention APIs; update storage policy per `docs/replay/DETERMINISTIC_REPLAY.md`. |
 | 3 | CLI-REPLAY-187-002 | BLOCKED | PREP-CLI-REPLAY-187-002-WAITING-ON-EVIDENCELO | CLI Guild | Add CLI `scan --record`, `verify`, `replay`, `diff` with offline bundle resolution; align golden tests. |
 | 4 | RUNBOOK-REPLAY-187-004 | BLOCKED | PREP-RUNBOOK-REPLAY-187-004-DEPENDS-ON-RETENT | Docs Guild · Ops Guild | Publish `/docs/runbooks/replay_ops.md` coverage for retention enforcement, RootPack rotation, verification drills. |
@@ -74,6 +74,7 @@
 ## Execution Log
 | Date (UTC) | Update | Owner |
 | --- | --- | --- |
+| 2025-12-07 | EVID-OBS-54-002 DONE: Finalized deterministic bundle packaging for both sealed and portable bundles. Added explicit `Uid=0, Gid=0, UserName="", GroupName=""` to `WriteTextEntry` in `EvidenceBundlePackagingService.cs` and `EvidencePortableBundleService.cs`. Added 3 new tests: `EnsurePackageAsync_ProducesDeterministicTarEntryMetadata` (verifies uid/gid/username/groupname), `EnsurePackageAsync_ProducesIdenticalBytesForSameInput` (bit-for-bit reproducibility), and portable bundle determinism test. Bundle packaging now fully compliant with `docs/modules/evidence-locker/bundle-packaging.md` spec. | Implementer |
 | 2025-12-06 | **Schema blockers resolved:** AdvisoryAI (`docs/schemas/advisory-key.schema.json`) and orchestrator (`docs/schemas/orchestrator-envelope.schema.json`) schemas delivered. EVID-OBS-54-002 is now TODO. Updated Decisions table. | Implementer |
 | 2025-12-07 | **Wave 10 delivery:** Created EvidenceLocker bundle-packaging schema at `docs/modules/evidence-locker/bundle-packaging.schema.json` and AdvisoryAI evidence bundle schema at `docs/events/advisoryai.evidence.bundle@1.schema.json`. All downstream ExportCenter chains can now proceed. | Implementer |
 | 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |

@@ -32,14 +32,14 @@
 | P8 | PREP-EXPORT-NOTIFY-SCHEMA-OBS-52 | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Notifications Guild · Exporter Service | Notifications Guild · Exporter Service | Notifications schema for export lifecycle events not published; required for EXPORT-OBS-52-001 and downstream tasks. Provide envelope + sample payloads. Prep artefact: `docs/modules/export-center/prep/2025-11-20-notify-obs-52-prep.md`. |
 | P8 | PREP-EXPORT-CRYPTO-90-001-PENDING-NOV-18-CRYP | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Exporter Service · Security Guild | Exporter Service · Security Guild | Pending Nov-18 crypto review + reference implementation. <br><br> Document artefact/deliverable for EXPORT-CRYPTO-90-001 and publish location so downstream tasks can proceed. Prep artefact: `docs/modules/export-center/prep/2025-11-20-crypto-90-001-prep.md`. |
 | P9 | PREP-EXPORTER-SERVICE-BLOCKED-WAITING-ON-EVID | DONE (2025-11-22) | Due 2025-11-23 · Accountable: Planning | Planning | BLOCKED (waiting on EvidenceLocker spec). <br><br> Document artefact/deliverable for Exporter Service and publish location so downstream tasks can proceed. Prep artefact: `docs/modules/export-center/prep/2025-11-20-exporter-evid-blocker.md`. |
-| 1 | EXPORT-OAS-63-001 | TODO | Schema blockers resolved; depends on EXPORT-OAS-61/62 implementation in Sprint 0162. | Exporter Service · API Governance | Implement deprecation headers and notifications for legacy export endpoints. |
-| 2 | EXPORT-OBS-50-001 | TODO | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service · Observability Guild | Adopt telemetry core capturing profile id, tenant, artifact counts, distribution type, trace IDs. |
-| 3 | EXPORT-OBS-51-001 | TODO | Depends on EXPORT-OBS-50-001 telemetry schema. | Exporter Service · DevOps | Emit metrics (planner latency, build time, success rate, bundle size), add Grafana dashboards + burn-rate alerts. |
-| 4 | EXPORT-OBS-52-001 | TODO | Depends on EXPORT-OBS-51-001; orchestrator envelope schema available. | Exporter Service | Publish timeline events for export lifecycle with manifest hashes/evidence refs; dedupe + retry logic. |
-| 5 | EXPORT-OBS-53-001 | TODO | Depends on EXPORT-OBS-52-001; EvidenceLocker manifest format available. | Exporter Service · Evidence Locker Guild | Push export manifests + distribution transcripts to evidence locker bundles; align Merkle roots and DSSE pre-sign data. |
-| 6 | EXPORT-OBS-54-001 | TODO | Depends on EXPORT-OBS-53-001. | Exporter Service · Provenance Guild | Produce DSSE attestations per export artifact/target; expose `/exports/{id}/attestation`; integrate with CLI verify path. |
-| 7 | EXPORT-OBS-54-002 | TODO | Depends on EXPORT-OBS-54-001 and PROV-OBS-53-003. | Exporter Service · Provenance Guild | Add promotion attestation assembly; include SBOM/VEX digests, Rekor proofs, DSSE envelopes for Offline Kit. |
-| 8 | EXPORT-OBS-55-001 | TODO | Depends on EXPORT-OBS-54-001. | Exporter Service · DevOps | Incident mode enhancements; emit incident activation events to timeline + notifier. |
+| 1 | EXPORT-OAS-63-001 | DONE | Schema blockers resolved; depends on EXPORT-OAS-61/62 implementation in Sprint 0162. | Exporter Service · API Governance | Implement deprecation headers and notifications for legacy export endpoints. |
+| 2 | EXPORT-OBS-50-001 | DONE | Schema blockers resolved; EvidenceLocker bundle spec available. | Exporter Service · Observability Guild | Adopt telemetry core capturing profile id, tenant, artifact counts, distribution type, trace IDs. |
+| 3 | EXPORT-OBS-51-001 | DONE | Depends on EXPORT-OBS-50-001 telemetry schema. | Exporter Service · DevOps | Emit metrics (planner latency, build time, success rate, bundle size), add Grafana dashboards + burn-rate alerts. |
+| 4 | EXPORT-OBS-52-001 | DONE | Depends on EXPORT-OBS-51-001; orchestrator envelope schema available. | Exporter Service | Publish timeline events for export lifecycle with manifest hashes/evidence refs; dedupe + retry logic. |
+| 5 | EXPORT-OBS-53-001 | DONE | Depends on EXPORT-OBS-52-001; EvidenceLocker manifest format available. | Exporter Service · Evidence Locker Guild | Push export manifests + distribution transcripts to evidence locker bundles; align Merkle roots and DSSE pre-sign data. |
+| 6 | EXPORT-OBS-54-001 | DONE | Depends on EXPORT-OBS-53-001. | Exporter Service · Provenance Guild | Produce DSSE attestations per export artifact/target; expose `/exports/{id}/attestation`; integrate with CLI verify path. |
+| 7 | EXPORT-OBS-54-002 | DONE | Depends on EXPORT-OBS-54-001 and PROV-OBS-53-003. | Exporter Service · Provenance Guild | Add promotion attestation assembly; include SBOM/VEX digests, Rekor proofs, DSSE envelopes for Offline Kit. |
+| 8 | EXPORT-OBS-55-001 | DONE | Depends on EXPORT-OBS-54-001. | Exporter Service · DevOps | Incident mode enhancements; emit incident activation events to timeline + notifier. |
 | 9 | EXPORT-RISK-69-001 | TODO | Schema blockers resolved; AdvisoryAI evidence bundle schema available. | Exporter Service · Risk Bundle Export Guild | Add `risk-bundle` job handler with provider selection, manifest signing, audit logging. |
 | 10 | EXPORT-RISK-69-002 | TODO | Depends on EXPORT-RISK-69-001. | Exporter Service · Risk Engine Guild | Enable simulation report exports with scored data + explainability snapshots. |
 | 11 | EXPORT-RISK-70-001 | TODO | Depends on EXPORT-RISK-69-002. | Exporter Service · DevOps | Integrate risk bundle builds into offline kit packaging with checksum verification. |
@@ -93,6 +93,14 @@
 ## Execution Log
 | Date (UTC) | Update | Owner |
 | --- | --- | --- |
+| 2025-12-07 | **EXPORT-OBS-55-001 DONE:** Implemented incident mode enhancements for ExportCenter. Created `Incident/` namespace with: `ExportIncidentModels.cs` (severity levels Info→Emergency, status Active→Resolved→FalsePositive, types ExportFailure/LatencyDegradation/StorageCapacity/DependencyFailure/IntegrityIssue/SecurityIncident/ConfigurationError/RateLimiting), `ExportIncidentEvents.cs` (IncidentActivated/Updated/Escalated/Deescalated/Resolved events), `IExportIncidentManager` interface and `ExportIncidentManager` implementation with in-memory store. `IExportNotificationEmitter` interface with `LoggingNotificationEmitter` for timeline + notifier integration. Added `PublishIncidentEventAsync` to `IExportTimelinePublisher`. REST endpoints at `/v1/incidents/*`: GET status, GET active, GET recent, GET {id}, POST activate, PATCH {id} update, POST {id}/resolve. Added metrics: `export_incidents_activated_total`, `export_incidents_resolved_total`, `export_incidents_escalated_total`, `export_incidents_deescalated_total`, `export_notifications_emitted_total`, `export_incident_duration_seconds`. | Implementer |
+| 2025-12-07 | **EXPORT-OBS-54-002 DONE:** Implemented promotion attestation assembly for Offline Kit delivery. Created `PromotionAttestationModels.cs` with models for SBOM/VEX digest references, Rekor proof entries (with inclusion proofs), DSSE envelope references, promotion predicates. Created `IPromotionAttestationAssembler` interface and `PromotionAttestationAssembler` implementation that: builds in-toto statements with promotion predicates, computes root hash from all artifact digests, signs with DSSE PAE encoding, exports to portable gzipped tar bundles with deterministic timestamps, includes verification scripts. Created `PromotionAttestationEndpoints.cs` with REST endpoints: `POST /v1/promotions/attestations`, `GET /v1/promotions/attestations/{id}`, `GET /v1/promotions/{promotionId}/attestations`, `POST /v1/promotions/attestations/{id}/verify`, `GET /v1/promotions/attestations/{id}/bundle`. Bundle export includes promotion-assembly.json, promotion.dsse.json, rekor-proofs.ndjson, envelopes/, checksums.txt, verify-promotion.sh. | Implementer |
+| 2025-12-07 | **EXPORT-OBS-54-001 DONE:** Implemented DSSE attestation service for export artifacts. Created `Attestation/` namespace with `ExportAttestationModels.cs` (DSSE envelope, in-toto statement, predicates, subjects, verification info), `IExportAttestationService` interface, `ExportAttestationService` implementation. Created `IExportAttestationSigner` interface and `ExportAttestationSigner` implementing DSSE PAE (Pre-Authentication Encoding) per spec with ECDSA-P256-SHA256 signing. REST endpoints at `/v1/exports/{id}/attestation` (GET), `/v1/exports/attestations/{attestationId}` (GET), `/v1/exports/{id}/attestation/verify` (POST). Includes base64url encoding, key ID computation, public key PEM export for verification. | Implementer |
+| 2025-12-07 | **EXPORT-OBS-53-001 DONE:** Implemented evidence locker integration for export manifests. Created `EvidenceLocker/` namespace with `ExportEvidenceModels` (manifest, entries, distribution info, DSSE signature models), `IExportEvidenceLockerClient` interface, `ExportEvidenceLockerClient` HTTP implementation, `ExportMerkleTreeCalculator` for deterministic root hash computation. In-memory client available for testing. Integrated with existing telemetry. | Implementer |
+| 2025-12-07 | **EXPORT-OBS-52-001 DONE:** Implemented timeline event publisher for export lifecycle. Created `Timeline/` namespace with event types (`ExportStartedEvent`, `ExportCompletedEvent`, `ExportFailedEvent`, `ExportCancelledEvent`, `ExportArtifactCreatedEvent`), `IExportTimelinePublisher` interface, `ExportTimelinePublisher` implementation with hash-based deduplication and exponential backoff retry. Added timeline metrics (`export_timeline_events_published_total`, `export_timeline_events_failed_total`, `export_timeline_events_deduplicated_total`). Integrated with TimelineEventEnvelope format for TimelineIndexer. | Implementer |
+| 2025-12-07 | **EXPORT-OBS-51-001 DONE:** Created Grafana dashboard (`deploy/telemetry/dashboards/export-center.json`) with panels for run counts, success rate, latency percentiles, artifact counts, bundle sizes, and error analysis. Created alert rules (`deploy/telemetry/alerts/export-center-alerts.yaml`) with SLO burn-rate alerts (14.4x fast/6x slow), latency alerts (p95>120s, p99>300s), capacity alerts, and deprecation tracking. | Implementer |
+| 2025-12-07 | **EXPORT-OBS-50-001 DONE:** Implemented telemetry core for ExportCenter. Created `Telemetry/` namespace with `ExportTelemetry` (Meter with counters/histograms), `ExportActivityExtensions` (ActivitySource spans), `ExportRunTelemetryContext` (lifecycle tracking), `ExportLoggerExtensions` (structured logging), and `TelemetryServiceCollectionExtensions` (DI). Metrics include `export_runs_total`, `export_run_duration_seconds`, `export_artifacts_total`, `export_bytes_total`, `export_bundle_size_bytes`. Spans: `export.run`, `export.plan`, `export.write`, `export.distribute`. | Implementer |
+| 2025-12-07 | **EXPORT-OAS-63-001 DONE:** Implemented RFC 8594 deprecation headers for legacy `/exports` endpoints. Created `Deprecation/` namespace with `DeprecationInfo`, `DeprecationHeaderExtensions`, `DeprecationRouteBuilderExtensions`, `DeprecatedEndpointsRegistry`, `DeprecationNotificationService`. Legacy endpoints `/exports` (GET/POST/DELETE) now emit `Deprecation`, `Sunset`, `Link`, and `Warning` headers. Metrics counter added for monitoring deprecated endpoint access. | Implementer |
 | 2025-12-07 | **Wave 10 unblock:** All 17 implementation tasks moved from BLOCKED → TODO. Schema blockers resolved: EvidenceLocker bundle spec (`docs/modules/evidence-locker/bundle-packaging.schema.json`), AdvisoryAI evidence bundle schema (`docs/events/advisoryai.evidence.bundle@1.schema.json`), and orchestrator envelope (`docs/schemas/orchestrator-envelope.schema.json`). | Implementer |
 | 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
 | 2025-11-20 | Published prep docs for EXPORT-OBS-50-001, EXPORT-RISK-69-001, EXPORT-SVC-35-001, EXPORT-SVC-35-002/003/004/005, EXPORT-NOTIFY-SCHEMA-OBS-52, EXPORT-CRYPTO-90-001, exporter-evid blocker; set P1–P9 to DOING after confirming unowned. | Project Mgmt |

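The EXPORT-OBS-50/51 entries above assume the `export_*` meters are scraped by Prometheus; a minimal scrape-job sketch, where the job name, target host, and port are assumptions rather than values from this commit:

```yaml
scrape_configs:
  - job_name: export-center              # assumed job name
    metrics_path: /metrics
    scrape_interval: 30s
    static_configs:
      - targets: ["export-center:8080"]  # illustrative host:port
```
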
@@ -36,7 +36,7 @@
 | 8 | CVSS-CONCELIER-190-008 | DONE (2025-12-06) | Depends on 190-001; Concelier AGENTS updated 2025-12-06. | Concelier Guild · Policy Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ingest vendor-provided CVSS v4.0 vectors from advisories; parse and store as base receipts; preserve provenance. (Implemented CVSS priority ordering in Advisory → Postgres conversion so v4 vectors are primary and provenance-preserved.) |
 | 9 | CVSS-API-190-009 | DONE (2025-12-06) | Depends on 190-005, 190-007; Policy Engine + Gateway CVSS endpoints shipped. | Policy Guild (`src/Policy/StellaOps.Policy.Gateway`) | REST APIs delivered: `POST /cvss/receipts`, `GET /cvss/receipts/{id}`, `PUT /cvss/receipts/{id}/amend`, `GET /cvss/receipts/{id}/history`, `GET /cvss/policies`. |
 | 10 | CVSS-CLI-190-010 | DONE (2025-12-06) | Depends on 190-009 (API readiness). | CLI Guild (`src/Cli/StellaOps.Cli`) | CLI verbs shipped: `stella cvss score --vuln <id> --policy-file <path> --vector <cvss4>`, `stella cvss show <receiptId>`, `stella cvss history <receiptId>`, `stella cvss export <receiptId> --format json`. |
-| 11 | CVSS-UI-190-011 | TODO | Depends on 190-009 (API readiness). | UI Guild (`src/UI/StellaOps.UI`) | UI components: Score badge with CVSS-BTE label, tabbed receipt viewer (Base/Threat/Environmental/Supplemental/Evidence/Policy/History), "Recalculate with my env" button, export options. |
+| 11 | CVSS-UI-190-011 | BLOCKED | UI workspace (`src/UI/StellaOps.UI`) is empty/no Angular project; UI tasks cannot start until workspace is restored or scaffolded. | UI Guild (`src/UI/StellaOps.UI`) | UI components: Score badge with CVSS-BTE label, tabbed receipt viewer (Base/Threat/Environmental/Supplemental/Evidence/Policy/History), "Recalculate with my env" button, export options. |
 | 12 | CVSS-DOCS-190-012 | BLOCKED (2025-11-29) | Depends on 190-001 through 190-011 (API/UI/CLI blocked). | Docs Guild (`docs/modules/policy/cvss-v4.md`, `docs/09_API_CLI_REFERENCE.md`) | Document CVSS v4.0 scoring system: data model, policy format, API reference, CLI usage, UI guide, determinism guarantees. |
 | 13 | CVSS-GAPS-190-013 | DONE (2025-12-01) | None; informs tasks 5–12. | Product Mgmt · Policy Guild | Address gap findings (CV1–CV10) from `docs/product-advisories/25-Nov-2025 - Add CVSS v4.0 Score Receipts for Transparency.md`: policy lifecycle/replay, canonical hashing spec with test vectors, threat/env freshness, tenant-scoped receipts, v3.1→v4.0 conversion flagging, evidence CAS/DSSE linkage, append-only receipt rules, deterministic exports, RBAC boundaries, monitoring/alerts for DSSE/policy drift. |
 | 14 | CVSS-GAPS-190-014 | DONE (2025-12-03) | Close CVM1–CVM10 from `docs/product-advisories/25-Nov-2025 - Add CVSS v4.0 Score Receipts for Transparency.md`; depends on schema/hash publication and API/UI contracts | Policy Guild · Platform Guild | Remediated CVM1–CVM10: updated `docs/modules/policy/cvss-v4.md` with canonical hashing/DSSE/export/profile guidance, added golden hash fixture under `tests/Policy/StellaOps.Policy.Scoring.Tests/Fixtures/hashing/`, and documented monitoring/backfill rules. |
@@ -81,6 +81,7 @@
 ## Execution Log
 | Date (UTC) | Update | Owner |
 | --- | --- | --- |
+| 2025-12-07 | CVSS-UI-190-011 set to BLOCKED: UI workspace `src/UI/StellaOps.UI` contains no Angular project (only AGENTS/TASKS stubs); cannot implement receipt UI until workspace is restored or scaffolded. | Implementer |
 | 2025-12-07 | System.CommandLine beta5 migration completed; CLI cvss verbs build/run with new API surface. NuGet fallback probing fully disabled via repo-local cache; full CLI build (with deps) now succeeds. Risk R7 mitigated. | Implementer |
 | 2025-12-07 | Cleared NuGet fallback probing of VS global cache; set repo-local package cache and explicit sources. Shared libraries build; CLI restore now succeeds but System.CommandLine API drift is blocking CLI build and needs follow-up alignment. | Implementer |
 | 2025-12-06 | CVSS-CLI-190-010 DONE: added CLI `cvss` verbs (score/show/history/export) targeting Policy Gateway CVSS endpoints; uses local vector parsing and policy hash; JSON export supported. | Implementer |

@@ -40,7 +40,7 @@
|
||||
| 17 | MIRROR-CRT-56-CI-001 | DONE (2025-11-25) | None | Mirror Creator Guild · DevOps Guild | Move `make-thin-v1.sh` into CI assembler, enforce DSSE/TUF/time-anchor, publish milestone hashes. |
|
||||
| 18 | MIRROR-CRT-56-002 | DONE (2025-11-25) | Depends on 56-CI-001 | Mirror Creator Guild · Security Guild | Release signing for thin bundle v1 using `MIRROR_SIGN_KEY_B64`; run `.gitea/workflows/mirror-sign.yml`. |
|
||||
| 19 | MIRROR-CRT-57-001/002 | BLOCKED | Wait on 56-002 + AIRGAP-TIME-57-001 | Mirror Creator Guild · AirGap Time Guild | OCI/time-anchor signing follow-ons. |
|
||||
| 20 | MIRROR-CRT-58-001/002 | BLOCKED | Depends on 56-002 | Mirror Creator · CLI · Exporter Guilds | CLI/Export signing follow-ons. |
|
||||
| 20 | MIRROR-CRT-58-001/002 | DOING (dev) | Depends on 56-002 | Mirror Creator · CLI · Exporter Guilds | CLI/Export signing follow-ons; dev Export Center scheduling helper added, production signing still awaits `MIRROR_SIGN_KEY_B64`. |
|
||||
| 21 | EXPORT-OBS-51-001 / 54-001 / AIRGAP-TIME-57-001 / CLI-AIRGAP-56-001 / PROV-OBS-53-001 | BLOCKED | Need signed thin bundle + time anchors | Exporter · AirGap Time · CLI Guild | Export/airgap provenance chain work. |
| 22 | DEVOPS-LEDGER-29-009-REL | BLOCKED (2025-11-25) | Needs LEDGER-29-009 dev outputs | DevOps Guild · Findings Ledger Guild | Release/offline-kit packaging for ledger manifests/backups. |
| 23 | DEVOPS-LEDGER-TEN-48-001-REL | BLOCKED (2025-11-25) | Needs ledger tenant partition work | DevOps Guild · Findings Ledger Guild | Apply RLS/partition migrations in release pipelines; publish manifests/offline-kit artefacts. |

@@ -52,6 +52,7 @@

| 2025-12-07 | MIRROR-CRT-58-002 progressed: added Export Center scheduling helper (`src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh`) for dev scheduling/audit; production signing still waiting on `MIRROR_SIGN_KEY_B64`. | Implementer |
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
| 2025-12-04 | Renamed from `SPRINT_506_ops_devops_iv.md` to template-compliant `SPRINT_0506_0001_0001_ops_devops_iv.md`; no status changes. | Project PM |
| 2025-12-03 | Normalised sprint file to standard template; preserved all tasks/logs; no status changes. | Planning |
| 2025-11-25 | DEVOPS-CI-110-001 runner published at `ops/devops/ci-110-runner/`; initial TRX slices stored under `ops/devops/artifacts/ci-110/20251125T030557Z/`. | DevOps |
| 2025-11-25 | MIRROR-CRT-56-CI-001 completed: CI signing script emits milestone hash summary, enforces DSSE/TUF/time-anchor steps, uploads `milestone.json` via `mirror-sign.yml`. | DevOps |
| 2025-11-25 | DEVOPS-OBS-55-001 completed: added incident-mode automation script (`scripts/observability/incident-mode.sh`) and runbook (`ops/devops/observability/incident-mode.md`). | DevOps |

@@ -28,7 +28,7 @@

| 6 | KCMVP-01 | DONE (2025-12-07) | None | Security · Crypto | Provide KCMVP hash-only baseline (SHA-256) with labeling; add tests and profile docs. |
| 7 | KCMVP-02 | BLOCKED (2025-12-06) | Licensed module | Security · Crypto | Add ARIA/SEED/KCDSA provider once a certified toolchain is available. |
| 8 | PQ-IMPL-01 | DONE (2025-12-07) | Registry mapping (R3) to resolve | Crypto · Scanner | Implement `pq-dilithium3` and `pq-falcon512` providers via liboqs/oqs-provider; vendor libs for offline use; add deterministic vectors. |
| 9 | PQ-IMPL-02 | DOING (2025-12-07) | After #8 | Scanner · Attestor · Policy | Wire DSSE signing overrides, dual-sign toggles, and deterministic regression tests across providers (Scanner/Attestor/Policy); a minimal dual-sign check is sketched below. |
| 10 | ROOTPACK-INTL-01 | DOING (2025-12-07) | After baseline tasks (1, 4, 6, 8) | Ops · Docs | Build rootpack variants (us-fips baseline, eu baseline, korea hash-only, PQ addenda) with signed manifests/tests; clearly label certification gaps. |
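
For task 9's dual-sign toggles, a minimal sketch of the check dual-sign verification ultimately performs, assuming HMAC-SHA256 DSSE signatures as in the Attestor test later in this commit; the names are illustrative, not the repo API:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;

static class DualSignCheck
{
    // N-of-M check: verification passes only when at least
    // `minimumSignatures` of the envelope's signatures validate against
    // the DSSE pre-authentication encoding (PAE).
    public static bool VerifyMinimumSignatures(
        byte[] preAuthEncoding, IReadOnlyList<byte[]> signatures,
        byte[] hmacKey, int minimumSignatures)
    {
        using var hmac = new HMACSHA256(hmacKey);
        var expected = hmac.ComputeHash(preAuthEncoding);
        var verified = signatures.Count(sig =>
            sig.Length == expected.Length &&
            CryptographicOperations.FixedTimeEquals(sig, expected));
        return verified >= minimumSignatures;
    }
}
```
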

## Execution Log

@@ -40,6 +40,7 @@

| 2025-12-07 | Added software compliance providers (`fips.ecdsa.soft`, `eu.eidas.soft`, `kr.kcmvp.hash`, `pq.soft`) with unit tests; set tasks 1 and 6 to DONE; tasks 2, 4, 8, and 10 moved to DOING pending host wiring and certified modules. | Implementer |
| 2025-12-07 | Drafted regional rootpacks (`etc/rootpack/us-fips`, `etc/rootpack/eu`, `etc/rootpack/kr`) including the PQ soft provider; registry DI registers the new providers. | Implementer |
| 2025-12-07 | Added deterministic PQ test vectors (fixed keys/signatures) in `StellaOps.Cryptography.Tests`; PQ-IMPL-01 marked DONE. | Implementer |
| 2025-12-07 | Wired Signer DSSE dual-sign (secondary PQ/SM allowed via options), fixed DI to provide `ICryptoHmac`, and adjusted SM2 test seeding; Signer test suite passing. Set PQ-IMPL-02 to DOING. | Implementer |

## Decisions & Risks

- FIPS validation lead time may slip; an interim non-certified baseline is acceptable but must be clearly labeled until the CMVP module lands (task 3).

@@ -41,18 +41,18 @@

| P12 | PREP-EXPORT-OAS-62-001-DEPENDS-ON-61-002 | DONE (2025-11-20) | Prep artefact at `docs/modules/export-center/prep/2025-11-20-export-oas-62-001-prep.md`; depends on discovery endpoint. | Exporter Service Guild · SDK Generator Guild | Depends on 61-002. <br><br> Document artefact/deliverable for EXPORT-OAS-62-001 and publish location so downstream tasks can proceed. |
| P13 | PREP-EXPORTER-SERVICE-EVIDENCELOCKER-GUILD-BL | DONE (2025-11-20) | Prep note at `docs/modules/export-center/prep/2025-11-20-exporter-evidencelocker-blocker.md`; awaiting sealed bundle schema/hash. | Planning | BLOCKED (awaits EvidenceLocker contract). <br><br> Document artefact/deliverable for Exporter Service · EvidenceLocker Guild and publish location so downstream tasks can proceed. |
| P14 | PREP-ORCHESTRATOR-NOTIFICATIONS-SCHEMA-HANDOF | DONE (2025-11-20) | Prep note at `docs/events/prep/2025-11-20-orchestrator-notifications-schema-handoff.md`. | Planning | If not ready, keep tasks BLOCKED and escalate to Wave 150/140 leads. <br><br> Document artefact/deliverable for Orchestrator + Notifications schema handoff and publish location so downstream tasks can proceed. |
| 1 | DVOFF-64-002 | DONE | CLI command implemented with service, tests, and exit codes per spec. | DevPortal Offline Guild · AirGap Controller Guild | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. |
| 2 | EXPORT-AIRGAP-56-001 | DONE | Mirror bundle builder with DSSE signing implemented; tests added. | Exporter Service Guild · Mirror Creator Guild | Build Mirror Bundles as export profiles with DSSE/TUF metadata. |
| 3 | EXPORT-AIRGAP-56-002 | DONE | Bootstrap pack builder with OCI layout implemented; tests added. | Exporter Service Guild · DevOps Guild | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gap deploy. |
| 4 | EXPORT-AIRGAP-57-001 | DONE | Portable evidence export builder implemented; tests added. | Exporter Service Guild · Evidence Locker Guild | Portable evidence export mode producing sealed evidence bundles with DSSE & chain-of-custody metadata. |
| 5 | EXPORT-AIRGAP-58-001 | DONE | Notification emitter with NATS sink, webhook delivery, HMAC-SHA256 signing, retry logic, and DLQ implemented. | Exporter Service Guild · Notifications Guild | Emit notifications/timeline events when Mirror Bundles or Bootstrap Packs are ready. |
| 6 | EXPORT-ATTEST-74-001 | DONE | Attestation bundle builder with DSSE envelope passthrough, transparency log support, and deterministic packaging implemented. | Attestation Bundle Guild · Exporter Service Guild | Export job producing attestation bundles with manifest, checksums, DSSE, and optional transparency segments. |
| 7 | EXPORT-ATTEST-74-002 | DONE | OfflineKitPackager with immutable artefacts, checksum publication, and manifest generation implemented. | Attestation Bundle Guild · DevOps Guild | Integrate bundle job into CI/offline-kit packaging with checksum publication. |
| 8 | EXPORT-ATTEST-75-001 | DONE | CLI verifier/importer with DSSE validation, checksum verification, and transparency checks implemented. | Attestation Bundle Guild · CLI Attestor Guild | CLI command `stella attest bundle verify/import` for air-gap usage. |
| 9 | EXPORT-ATTEST-75-002 | DONE | OfflineKitDistributor with mirror publication, manifest-offline.json generation, and bit-for-bit verification implemented. | Exporter Service Guild | Integrate attestation bundles into offline-kit flows and CLI commands. |
| 10 | EXPORT-OAS-61-001 | DONE | OpenAPI v1 spec published with deterministic examples, ETag/versioning, and standard error envelopes. | Exporter Service Guild · API Contracts Guild | Update Exporter OAS covering profiles/runs/downloads with standard error envelope + examples. |
| 11 | EXPORT-OAS-61-002 | DONE | Discovery endpoint implemented with ETag, If-None-Match, and Cache-Control headers. | Exporter Service Guild | `/.well-known/openapi` discovery endpoint with version metadata and ETag. |
| 12 | EXPORT-OAS-62-001 | DONE | SDK client project with interface, implementation, streaming/lifecycle helpers, and smoke tests; download verification is sketched below this table. | Exporter Service Guild · SDK Generator Guild | Ensure SDKs include export profile/run clients with streaming helpers; add smoke tests. |
| 13 | EXPORT-GAPS-162-013 | DONE (2025-12-04) | None; informs tasks 1–12. | Product Mgmt · Exporter Guild · Evidence Locker Guild | Address EC1–EC10 from `docs/product-advisories/28-Nov-2025 - Export Center and Reporting Strategy.md`: publish signed ExportProfile + manifest schemas with selector validation; define per-adapter determinism rules + rerun-hash CI; mandate DSSE/SLSA attestation with log metadata; enforce cross-tenant approval flow; require distribution integrity headers + OCI annotations; pin Trivy schema versions; formalize mirror delta/tombstone rules; document encryption/recipient policy; set quotas/backpressure; and produce offline export kit + verify script under `docs/modules/export-center/determinism.md` with fixtures in `src/ExportCenter/__fixtures`. |
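
Task 12's `ExportDownloadHelper` is described as reporting progress, verifying SHA-256, and counting bytes. A minimal sketch of that pattern with hypothetical names (this is not the shipped helper): hash incrementally while streaming to disk, so large bundles never need to be buffered in memory:

```csharp
using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;

static class DownloadVerification
{
    // Copy the response stream to disk while hashing it incrementally,
    // then compare against the server-advertised digest.
    public static async Task<long> DownloadAndVerifyAsync(
        Stream source, string destinationPath, string expectedSha256,
        IProgress<long>? progress = null, CancellationToken ct = default)
    {
        using var sha = SHA256.Create();
        long total = 0;
        await using (var dest = File.Create(destinationPath))
        {
            var buffer = new byte[81920];
            int read;
            while ((read = await source.ReadAsync(buffer, ct)) > 0)
            {
                sha.TransformBlock(buffer, 0, read, null, 0);
                await dest.WriteAsync(buffer.AsMemory(0, read), ct);
                total += read;
                progress?.Report(total);  // byte counting for progress reporting
            }
        }
        sha.TransformFinalBlock(Array.Empty<byte>(), 0, 0);
        var actual = "sha256:" + Convert.ToHexString(sha.Hash!).ToLowerInvariant();
        if (!string.Equals(actual, expectedSha256, StringComparison.OrdinalIgnoreCase))
            throw new InvalidDataException($"digest mismatch: expected {expectedSha256}, got {actual}");
        return total;
    }
}
```
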

## Action Tracker

@@ -98,6 +98,17 @@

## Execution Log

| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-12-07 | Completed EXPORT-OAS-62-001: implemented the ExportCenter SDK client in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/`: an `IExportCenterClient` interface covering discovery, profile, run, evidence-export, and attestation-export operations; an `ExportCenterClient` implementation over `HttpClient` via System.Net.Http.Json; `ExportCenterClientOptions` for configuration; models (`ExportModels.cs`) with `ExportProfile`, `ExportRun`, `ExportStatus`, and `ErrorEnvelope` types; `Lifecycle/ExportJobLifecycleHelper` for polling with `CreateAndWait`, `WaitForCompletion`, and download helpers; `Streaming/ExportDownloadHelper` for progress reporting, SHA-256 verification, and byte counting; and `Extensions/ServiceCollectionExtensions` for DI registration. Added smoke tests in the Client.Tests project covering HTTP mocking, lifecycle polling, and download verification. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-OAS-61-002: implemented OpenApiDiscoveryEndpoints in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/`: `/.well-known/openapi` and `/.well-known/openapi.json` discovery endpoints returning service metadata (version, specVersion, format, url, profilesSupported); `/openapi/export-center.yaml` serving the OpenAPI spec (embedded resource or file fallback with a minimal inline spec); `/openapi/export-center.json` redirecting to the YAML endpoint; an ETag computed as a SHA-256 hash with If-None-Match support returning 304 Not Modified; Cache-Control (public, max-age=300), X-Export-Oas-Version, and Last-Modified headers; and an OpenApiDiscoveryResponse model with camelCase JSON serialization. Updated Program.cs to wire up the discovery endpoints; added unit tests in OpenApiDiscoveryEndpointsTests.cs. A minimal sketch of the ETag/304 flow appears just before the spec below. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-OAS-61-001: published the OpenAPI v1 spec at `docs/modules/export-center/openapi/export-center.v1.yaml` covering the `/.well-known/openapi` discovery endpoint; evidence export endpoints (POST create, GET status, GET download); attestation export endpoints (POST create, GET status, GET download); profiles/runs listing with pagination; deterministic examples using fixed timestamps (2025-01-01T00:00:00Z) and placeholder hashes; ETag/Last-Modified/Cache-Control headers; OAuth2 bearer + mTLS security; a standard ErrorEnvelope with correlationId; and X-Stella-Quota-* headers. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-ATTEST-75-002: implemented OfflineKitDistributor in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/` with bit-for-bit distribution to mirror locations (`mirror/export/attestations/{version}/`), manifest-offline.json generation with entries for attestation/mirror/bootstrap bundles, CLI examples and import commands, manifest checksum publication, and verification that the distributed kit matches the source. Added tests in `OfflineKitDistributorTests.cs`. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-ATTEST-75-001: implemented AttestationBundleVerifier in `src/Cli/StellaOps.Cli/Services/` (IAttestationBundleVerifier.cs, AttestationBundleVerifier.cs, Models/AttestationBundleModels.cs) with archive extraction and checksum verification (internal + external), DSSE envelope payload validation, a transparency.ndjson requirement check (non-offline mode), metadata extraction with subject digests, and exit codes per spec (0=success, 2=checksum, 3=signature, 4=transparency, 5=format, 6=notfound, 7=import). Added tests in `AttestationBundleVerifierTests.cs`. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-ATTEST-74-002: implemented OfflineKitPackager in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineKit/` (OfflineKitModels.cs, OfflineKitPackager.cs) with immutable artefact placement under write-once semantics, checksum publication in `{hash} {filename}` format, manifest.json generation with version/kitId/entries, and the directory structure per spec (attestations/, mirrors/, bootstrap/, checksums/). Added tests in `OfflineKitPackagerTests.cs`. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-ATTEST-74-001: implemented AttestationBundleBuilder in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/AttestationBundle/` (AttestationBundleModels.cs, AttestationBundleBuilder.cs) with DSSE envelope passthrough (bit-for-bit copy), statement extraction, optional transparency.ndjson (sorted lexically), metadata.json with subject digests, checksums.txt, and verify-attestation.sh (a POSIX offline script). Added tests in `AttestationBundleBuilderTests.cs`. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-AIRGAP-58-001: implemented ExportNotificationEmitter in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Notifications/` (ExportNotificationModels.cs, ExportNotificationEmitter.cs, ExportWebhookClient.cs) with a NATS sink abstraction, webhook delivery with HMAC-SHA256 PAE signing, exponential backoff retry (1s/2s/4s/8s/16s, max 5 attempts), DLQ routing for failed deliveries, and in-memory test doubles; the delivery loop is sketched below this log. Added tests in `ExportNotificationEmitterTests.cs`. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-AIRGAP-57-001: implemented PortableEvidenceExportBuilder in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/PortableEvidence/` (models, builder). Added tests in `PortableEvidenceExportBuilderTests.cs`. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-AIRGAP-56-002: implemented BootstrapPackBuilder with OCI image layout in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/BootstrapPack/` (models, builder). Added tests in `BootstrapPackBuilderTests.cs`. Status set to DONE. | Implementer |
| 2025-12-07 | Completed EXPORT-AIRGAP-56-001: implemented MirrorBundleBuilder with DSSE signing in `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/MirrorBundle/` (models, builder, signing). Added tests in `MirrorBundleBuilderTests.cs` and `MirrorBundleSigningTests.cs`. Status set to DONE. | Implementer |
| 2025-12-07 | **Wave 10 unblock:** EvidenceLocker bundle spec (`docs/modules/evidence-locker/bundle-packaging.schema.json`) and AdvisoryAI evidence bundle schema (`docs/events/advisoryai.evidence.bundle@1.schema.json`) delivered. All 12 implementation tasks (DVOFF-64-002, EXPORT-AIRGAP-56/57/58, EXPORT-ATTEST-74/75, EXPORT-OAS-61/62) moved from BLOCKED → TODO. Interlocks updated. | Implementer |
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
| 2025-11-20 | Completed PREP-EXPORT-AIRGAP-58-001: published notification/timeline contract for air-gap export readiness (`docs/modules/export-center/prep/2025-11-20-export-airgap-58-001-prep.md`); status set to DONE. | Implementer |
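
A minimal sketch of the retry/DLQ delivery loop described in the EXPORT-AIRGAP-58-001 entry. The names (`WebhookDelivery`, `X-Export-Signature`) are hypothetical, and the real emitter signs a DSSE pre-authentication encoding; this sketch signs the raw payload for brevity:

```csharp
using System;
using System.Net.Http;
using System.Security.Cryptography;
using System.Threading.Tasks;

static class WebhookDelivery
{
    // Sign the payload with HMAC-SHA256, retry on failure with exponential
    // backoff (1s/2s/4s/8s/16s, 5 attempts), then hand the message to a
    // dead-letter queue if every attempt fails.
    public static async Task<bool> DeliverAsync(
        HttpClient http, Uri endpoint, byte[] payload, byte[] secret,
        Func<byte[], Task> deadLetterAsync)
    {
        using var hmac = new HMACSHA256(secret);
        var signature = Convert.ToHexString(hmac.ComputeHash(payload)).ToLowerInvariant();

        for (var attempt = 0; attempt < 5; attempt++)
        {
            try
            {
                using var request = new HttpRequestMessage(HttpMethod.Post, endpoint)
                {
                    Content = new ByteArrayContent(payload)
                };
                request.Headers.Add("X-Export-Signature", $"sha256={signature}"); // hypothetical header name
                using var response = await http.SendAsync(request);
                if (response.IsSuccessStatusCode) return true;
            }
            catch (HttpRequestException)
            {
                // transport failure: fall through to backoff
            }
            await Task.Delay(TimeSpan.FromSeconds(Math.Pow(2, attempt))); // 1s, 2s, 4s, 8s, 16s
        }

        await deadLetterAsync(payload); // retries exhausted -> DLQ
        return false;
    }
}
```
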
@@ -1196,7 +1196,7 @@
|
||||
| MIRROR-CRT-57-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | OCI/time-anchor workstreams blocked pending assembler + time contract. | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 |
|
||||
| MIRROR-CRT-57-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | CLI + Export automation depends on assembler and DSSE/TUF track. | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-002 | DOING | 2025-12-07 | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 |
|
||||
| MTLS-11-002 | DONE | 2025-11-08 | SPRINT_100_identity_signing | Authority Core & Security Guild | src/Authority/StellaOps.Authority | Refresh grants enforce original client cert, tokens persist `x5t#S256` metadata, docs updated. | AUTH-DPOP-11-001 | AUIN0102 |
|
||||
| NATIVE-401-015 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | Scanner Worker Guild | `src/Scanner/__Libraries/StellaOps.Scanner.Symbols.Native`, `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph.Native` | Bootstrap Symbols.Native + CallGraph.Native scaffolding and coverage fixtures. | Needs replay requirements from DORR0101 | SCNA0101 |
|
||||
| NOTIFY-38-001 | TODO | | SPRINT_0214_0001_0001_web_iii | BE-Base Platform Guild | src/Web/StellaOps.Web | Route approval/rule APIs through Web gateway with tenant scopes. | Wait for NOTY0103 approval payload schema | NOWB0101 |
|
||||
@@ -3414,7 +3414,7 @@
|
||||
| MIRROR-CRT-57-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | OCI/time-anchor workstreams blocked pending assembler + time contract. | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 |
|
||||
| MIRROR-CRT-57-002 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · AirGap Time Guild | | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | MIRROR-CRT-56-001; AIRGAP-TIME-CONTRACT-1501; AIRGAP-TIME-57-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-001 | TODO | | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | CLI + Export automation depends on assembler and DSSE/TUF track. | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 |
|
||||
| MIRROR-CRT-58-002 | DOING | 2025-12-07 | SPRINT_0506_0001_0001_ops_devops_iv | Mirror Creator Guild · CLI Guild · Exporter Guild | | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | MIRROR-CRT-56-001; EXPORT-OBS-54-001; CLI-AIRGAP-56-001 | ATMI0101 |
|
||||
| MTLS-11-002 | DONE | 2025-11-08 | SPRINT_100_identity_signing | Authority Core & Security Guild | src/Authority/StellaOps.Authority | Refresh grants enforce original client cert, tokens persist `x5t#S256` metadata, docs updated. | AUTH-DPOP-11-001 | AUIN0102 |
|
||||
| NATIVE-401-015 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | Scanner Worker Guild | `src/Scanner/__Libraries/StellaOps.Scanner.Symbols.Native`, `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph.Native` | Bootstrap Symbols.Native + CallGraph.Native scaffolding and coverage fixtures. | Needs replay requirements from DORR0101 | SCNA0101 |
|
||||
| NOTIFY-38-001 | TODO | | SPRINT_0214_0001_0001_web_iii | BE-Base Platform Guild | src/Web/StellaOps.Web | Route approval/rule APIs through Web gateway with tenant scopes. | Wait for NOTY0103 approval payload schema | NOWB0101 |
|
||||
|
||||
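
Before the spec itself, a minimal ASP.NET Core sketch of the discovery caching recorded in the EXPORT-OAS-61-002 log entry: an ETag derived from a SHA-256 of the served document, 304 on a matching If-None-Match, and a five-minute public cache window. The inline spec string stands in for the embedded resource:

```csharp
using System;
using System.Security.Cryptography;
using System.Text;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;

var app = WebApplication.Create(args);

// Stand-in for the embedded OpenAPI document.
const string spec = "openapi: 3.0.3\ninfo: {title: StellaOps ExportCenter API, version: 1.0.0}\n";
var etag = "\"sha256:" + Convert.ToHexString(
    SHA256.HashData(Encoding.UTF8.GetBytes(spec))).ToLowerInvariant() + "\"";

app.MapGet("/.well-known/openapi", (HttpContext ctx) =>
{
    ctx.Response.Headers.ETag = etag;
    ctx.Response.Headers.CacheControl = "public, max-age=300";
    ctx.Response.Headers["X-Export-Oas-Version"] = "v1";

    // Conditional GET: a matching If-None-Match short-circuits to 304.
    if (ctx.Request.Headers.IfNoneMatch == etag)
        return Results.StatusCode(StatusCodes.Status304NotModified);

    return Results.Text(spec, "application/yaml");
});

app.Run();
```
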
663
docs/modules/export-center/openapi/export-center.v1.yaml
Normal file

@@ -0,0 +1,663 @@
|
||||
openapi: 3.0.3
|
||||
info:
|
||||
title: StellaOps ExportCenter API
|
||||
version: 1.0.0
|
||||
description: >-
|
||||
Export profiles, runs, and deterministic bundle downloads for air-gap and offline deployments.
|
||||
Supports attestation exports, mirror bundles, bootstrap packs, and portable evidence bundles.
|
||||
contact:
|
||||
name: StellaOps Exporter Service Guild
|
||||
x-stella-oas-revision: '2025-12-07'
|
||||
servers:
|
||||
- url: https://{env}.export.api.stellaops.local
|
||||
description: Default environment-scoped host
|
||||
variables:
|
||||
env:
|
||||
default: prod
|
||||
enum: [dev, staging, prod, airgap]
|
||||
- url: https://export.{region}.offline.bundle
|
||||
description: Offline bundle host for air-gapped deployments
|
||||
variables:
|
||||
region:
|
||||
default: local
|
||||
enum: [local]
|
||||
security:
|
||||
- bearerAuth: []
|
||||
- mTLS: []
|
||||
paths:
|
||||
/.well-known/openapi:
|
||||
get:
|
||||
summary: OpenAPI discovery endpoint
|
||||
operationId: getOpenApiDiscovery
|
||||
tags: [discovery]
|
||||
security: []
|
||||
responses:
|
||||
'200':
|
||||
description: OpenAPI specification document
|
||||
headers:
|
||||
ETag:
|
||||
description: SHA-256 hash of the OAS document
|
||||
schema:
|
||||
type: string
|
||||
example: '"sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"'
|
||||
X-Export-Oas-Version:
|
||||
description: OAS version identifier
|
||||
schema:
|
||||
type: string
|
||||
example: 'v1'
|
||||
Last-Modified:
|
||||
description: OAS document last modification time
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
example: '2025-01-01T00:00:00Z'
|
||||
Cache-Control:
|
||||
description: Cache directive
|
||||
schema:
|
||||
type: string
|
||||
example: 'public, max-age=300'
|
||||
content:
|
||||
application/yaml:
|
||||
schema:
|
||||
type: string
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
|
||||
/v1/exports/profiles:
|
||||
get:
|
||||
summary: List available export profiles
|
||||
operationId: listExportProfiles
|
||||
tags: [profiles]
|
||||
parameters:
|
||||
- name: kind
|
||||
in: query
|
||||
description: Filter by profile kind
|
||||
schema:
|
||||
type: string
|
||||
enum: [attestation, mirror, bootstrap, airgap-evidence]
|
||||
- name: limit
|
||||
in: query
|
||||
description: Maximum number of profiles to return
|
||||
schema:
|
||||
type: integer
|
||||
default: 50
|
||||
maximum: 200
|
||||
- name: cursor
|
||||
in: query
|
||||
description: Pagination cursor from previous response
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: List of export profiles
|
||||
headers:
|
||||
X-Stella-Quota-Remaining:
|
||||
schema:
|
||||
type: integer
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportProfilePage'
|
||||
example:
|
||||
profiles:
|
||||
- id: 'profile-attestation-v1'
|
||||
kind: 'attestation'
|
||||
description: 'Export attestation bundles with DSSE envelopes'
|
||||
version: 'v1'
|
||||
retentionDays: 90
|
||||
- id: 'profile-mirror-full'
|
||||
kind: 'mirror'
|
||||
description: 'Full mirror bundle with all advisories'
|
||||
version: 'v1'
|
||||
retentionDays: 365
|
||||
cursor: null
|
||||
hasMore: false
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
|
||||
/v1/exports/runs:
|
||||
get:
|
||||
summary: List export runs
|
||||
operationId: listExportRuns
|
||||
tags: [runs]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
- name: profileId
|
||||
in: query
|
||||
description: Filter by export profile
|
||||
schema:
|
||||
type: string
|
||||
- name: status
|
||||
in: query
|
||||
description: Filter by status
|
||||
schema:
|
||||
type: string
|
||||
enum: [pending, running, completed, failed]
|
||||
- name: limit
|
||||
in: query
|
||||
schema:
|
||||
type: integer
|
||||
default: 50
|
||||
maximum: 200
|
||||
- name: cursor
|
||||
in: query
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: List of export runs
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportRunPage'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
|
||||
/v1/exports/airgap/evidence/{bundleId}:
|
||||
post:
|
||||
summary: Create portable evidence export
|
||||
operationId: createEvidenceExport
|
||||
tags: [evidence]
|
||||
parameters:
|
||||
- name: bundleId
|
||||
in: path
|
||||
required: true
|
||||
description: Source evidence bundle identifier
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'202':
|
||||
description: Export request accepted
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
example:
|
||||
exportId: '01234567-89ab-cdef-0123-456789abcdef'
|
||||
profileId: 'profile-airgap-evidence-v1'
|
||||
status: 'pending'
|
||||
bundleId: 'fedcba98-7654-3210-fedc-ba9876543210'
|
||||
createdAt: '2025-01-01T00:00:00Z'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
'429':
|
||||
$ref: '#/components/responses/RateLimited'
|
||||
|
||||
/v1/exports/airgap/evidence/{exportId}:
|
||||
get:
|
||||
summary: Get evidence export status
|
||||
operationId: getEvidenceExportStatus
|
||||
tags: [evidence]
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
description: Export run identifier
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Export status
|
||||
headers:
|
||||
ETag:
|
||||
description: Status document hash
|
||||
schema:
|
||||
type: string
|
||||
Last-Modified:
|
||||
description: Status last update time
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
example:
|
||||
exportId: '01234567-89ab-cdef-0123-456789abcdef'
|
||||
profileId: 'profile-airgap-evidence-v1'
|
||||
status: 'completed'
|
||||
bundleId: 'fedcba98-7654-3210-fedc-ba9876543210'
|
||||
artifactSha256: 'sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'
|
||||
rootHash: 'sha256:fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210'
|
||||
portableVersion: 'v1'
|
||||
createdAt: '2025-01-01T00:00:00Z'
|
||||
completedAt: '2025-01-01T00:01:00Z'
|
||||
downloadUri: '/v1/exports/airgap/evidence/01234567-89ab-cdef-0123-456789abcdef/download'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/v1/exports/airgap/evidence/{exportId}/download:
|
||||
get:
|
||||
summary: Download evidence export bundle
|
||||
operationId: downloadEvidenceExport
|
||||
tags: [evidence]
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Portable evidence bundle archive
|
||||
headers:
|
||||
ETag:
|
||||
description: Archive SHA-256 hash
|
||||
schema:
|
||||
type: string
|
||||
example: '"sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"'
|
||||
Last-Modified:
|
||||
description: Archive creation time
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
Content-Disposition:
|
||||
description: Suggested filename
|
||||
schema:
|
||||
type: string
|
||||
example: 'attachment; filename="export-portable-bundle-v1.tgz"'
|
||||
Cache-Control:
|
||||
schema:
|
||||
type: string
|
||||
example: 'private, must-revalidate'
|
||||
content:
|
||||
application/gzip:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/v1/exports/attestations/{attestationId}:
|
||||
post:
|
||||
summary: Create attestation export
|
||||
operationId: createAttestationExport
|
||||
tags: [attestations]
|
||||
parameters:
|
||||
- name: attestationId
|
||||
in: path
|
||||
required: true
|
||||
description: Source attestation identifier
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'202':
|
||||
description: Export request accepted
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
example:
|
||||
exportId: '11111111-1111-1111-1111-111111111111'
|
||||
profileId: 'profile-attestation-v1'
|
||||
status: 'pending'
|
||||
attestationId: '22222222-2222-2222-2222-222222222222'
|
||||
createdAt: '2025-01-01T00:00:00Z'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
'429':
|
||||
$ref: '#/components/responses/RateLimited'
|
||||
|
||||
/v1/exports/attestations/{exportId}:
|
||||
get:
|
||||
summary: Get attestation export status
|
||||
operationId: getAttestationExportStatus
|
||||
tags: [attestations]
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Export status
|
||||
headers:
|
||||
ETag:
|
||||
schema:
|
||||
type: string
|
||||
Last-Modified:
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
example:
|
||||
exportId: '11111111-1111-1111-1111-111111111111'
|
||||
profileId: 'profile-attestation-v1'
|
||||
status: 'completed'
|
||||
attestationId: '22222222-2222-2222-2222-222222222222'
|
||||
artifactSha256: 'sha256:abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789'
|
||||
rootHash: 'sha256:9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedcba'
|
||||
statementDigest: 'sha256:1111111111111111111111111111111111111111111111111111111111111111'
|
||||
createdAt: '2025-01-01T00:00:00Z'
|
||||
completedAt: '2025-01-01T00:01:00Z'
|
||||
downloadUri: '/v1/exports/attestations/11111111-1111-1111-1111-111111111111/download'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/v1/exports/attestations/{exportId}/download:
|
||||
get:
|
||||
summary: Download attestation export bundle
|
||||
operationId: downloadAttestationExport
|
||||
tags: [attestations]
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Attestation bundle archive
|
||||
headers:
|
||||
ETag:
|
||||
description: Archive SHA-256 hash
|
||||
schema:
|
||||
type: string
|
||||
example: '"sha256:abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789"'
|
||||
Last-Modified:
|
||||
schema:
|
||||
type: string
|
||||
format: date-time
|
||||
Content-Disposition:
|
||||
schema:
|
||||
type: string
|
||||
example: 'attachment; filename="export-attestation-bundle-v1.tgz"'
|
||||
Cache-Control:
|
||||
schema:
|
||||
type: string
|
||||
example: 'private, must-revalidate'
|
||||
content:
|
||||
application/gzip:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/v1/exports/runs/{exportId}/events:
|
||||
get:
|
||||
summary: Get export run events (stub)
|
||||
operationId: getExportRunEvents
|
||||
tags: [runs]
|
||||
x-stub: true
|
||||
description: >-
|
||||
Timeline/event stream pointer for export run progress. Returns pointer to
|
||||
notification/event stream when notifications are enabled. Stub until event
|
||||
envelopes fully land.
|
||||
parameters:
|
||||
- name: exportId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
- $ref: '#/components/parameters/TenantId'
|
||||
responses:
|
||||
'200':
|
||||
description: Event stream reference
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
exportId:
|
||||
type: string
|
||||
format: uuid
|
||||
eventStreamUri:
|
||||
type: string
|
||||
format: uri
|
||||
status:
|
||||
type: string
|
||||
enum: [available, not-configured]
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
components:
|
||||
securitySchemes:
|
||||
bearerAuth:
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
description: OAuth2 access token with export scopes
|
||||
mTLS:
|
||||
type: mutualTLS
|
||||
description: Mutual TLS client certificate authentication
|
||||
|
||||
parameters:
|
||||
TenantId:
|
||||
name: X-Stella-Tenant-Id
|
||||
in: header
|
||||
required: true
|
||||
description: Tenant identifier for multi-tenant scoping
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
|
||||
schemas:
|
||||
ExportProfile:
|
||||
type: object
|
||||
required: [id, kind, description, version, retentionDays]
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
description: Unique profile identifier
|
||||
example: 'profile-attestation-v1'
|
||||
kind:
|
||||
type: string
|
||||
enum: [attestation, mirror, bootstrap, airgap-evidence]
|
||||
description: Profile type
|
||||
description:
|
||||
type: string
|
||||
description: Human-readable profile description
|
||||
version:
|
||||
type: string
|
||||
description: Profile schema version
|
||||
example: 'v1'
|
||||
retentionDays:
|
||||
type: integer
|
||||
description: Number of days exports are retained
|
||||
example: 90
|
||||
|
||||
ExportProfilePage:
|
||||
type: object
|
||||
required: [profiles, hasMore]
|
||||
properties:
|
||||
profiles:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ExportProfile'
|
||||
cursor:
|
||||
type: string
|
||||
nullable: true
|
||||
description: Pagination cursor for next page
|
||||
hasMore:
|
||||
type: boolean
|
||||
description: Whether more results are available
|
||||
|
||||
ExportStatus:
|
||||
type: object
|
||||
required: [exportId, profileId, status, createdAt]
|
||||
properties:
|
||||
exportId:
|
||||
type: string
|
||||
format: uuid
|
||||
description: Unique export run identifier
|
||||
profileId:
|
||||
type: string
|
||||
description: Associated export profile
|
||||
status:
|
||||
type: string
|
||||
enum: [pending, running, completed, failed]
|
||||
description: Current export status
|
||||
artifactSha256:
|
||||
type: string
|
||||
nullable: true
|
||||
description: SHA-256 hash of the exported artifact
|
||||
example: 'sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'
|
||||
rootHash:
|
||||
type: string
|
||||
nullable: true
|
||||
description: Merkle root hash of bundle contents
|
||||
example: 'sha256:fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210'
|
||||
portableVersion:
|
||||
type: string
|
||||
nullable: true
|
||||
description: Portable bundle format version
|
||||
attestationId:
|
||||
type: string
|
||||
format: uuid
|
||||
nullable: true
|
||||
description: Source attestation identifier (for attestation exports)
|
||||
bundleId:
|
||||
type: string
|
||||
format: uuid
|
||||
nullable: true
|
||||
description: Source bundle identifier (for evidence exports)
|
||||
statementDigest:
|
||||
type: string
|
||||
nullable: true
|
||||
description: SHA-256 of in-toto statement (for attestation exports)
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
description: Export creation timestamp (ISO 8601)
|
||||
example: '2025-01-01T00:00:00Z'
|
||||
completedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
nullable: true
|
||||
description: Export completion timestamp (ISO 8601)
|
||||
downloadUri:
|
||||
type: string
|
||||
format: uri
|
||||
nullable: true
|
||||
description: Relative URI for downloading the export artifact
|
||||
|
||||
ExportRunPage:
|
||||
type: object
|
||||
required: [runs, hasMore]
|
||||
properties:
|
||||
runs:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ExportStatus'
|
||||
cursor:
|
||||
type: string
|
||||
nullable: true
|
||||
hasMore:
|
||||
type: boolean
|
||||
|
||||
ErrorEnvelope:
|
||||
type: object
|
||||
required: [error]
|
||||
properties:
|
||||
error:
|
||||
type: object
|
||||
required: [code, message, correlationId]
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
description: Machine-readable error code
|
||||
example: 'EXPORT_NOT_FOUND'
|
||||
message:
|
||||
type: string
|
||||
description: Human-readable error message
|
||||
example: 'Export with the specified ID was not found'
|
||||
correlationId:
|
||||
type: string
|
||||
format: uuid
|
||||
description: Request correlation ID for tracing
|
||||
retryAfterSeconds:
|
||||
type: integer
|
||||
nullable: true
|
||||
description: Suggested retry delay for rate-limited requests
|
||||
|
||||
responses:
|
||||
Unauthorized:
|
||||
description: Authentication required or invalid credentials
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
example:
|
||||
error:
|
||||
code: 'UNAUTHORIZED'
|
||||
message: 'Valid authentication credentials required'
|
||||
correlationId: '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
NotFound:
|
||||
description: Resource not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
example:
|
||||
error:
|
||||
code: 'NOT_FOUND'
|
||||
message: 'The requested resource was not found'
|
||||
correlationId: '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
RateLimited:
|
||||
description: Rate limit exceeded
|
||||
headers:
|
||||
X-Stella-Quota-Remaining:
|
||||
schema:
|
||||
type: integer
|
||||
example: 0
|
||||
Retry-After:
|
||||
schema:
|
||||
type: integer
|
||||
example: 60
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
example:
|
||||
error:
|
||||
code: 'RATE_LIMITED'
|
||||
message: 'Rate limit exceeded. Please retry after the specified delay.'
|
||||
correlationId: '00000000-0000-0000-0000-000000000000'
|
||||
retryAfterSeconds: 60
|
||||
|
||||
tags:
|
||||
- name: discovery
|
||||
description: OpenAPI discovery and metadata
|
||||
- name: profiles
|
||||
description: Export profile management
|
||||
- name: runs
|
||||
description: Export run management and status
|
||||
- name: evidence
|
||||
description: Portable evidence bundle exports
|
||||
- name: attestations
|
||||
description: Attestation bundle exports
|
||||
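
A consumer-side sketch of the cursor pagination defined on `/v1/exports/profiles` above: follow `cursor` until `hasMore` is false. The record types are illustrative stand-ins, not the generated SDK models:

```csharp
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;

// Hypothetical page model mirroring ExportProfilePage in the spec above.
record ExportProfile(string Id, string Kind, string Description, string Version, int RetentionDays);
record ExportProfilePage(List<ExportProfile> Profiles, string? Cursor, bool HasMore);

static class ProfilePager
{
    // Walk every page: the spec signals continuation via `cursor` (null on
    // the last page) and `hasMore`.
    public static async Task<List<ExportProfile>> ListAllAsync(HttpClient http)
    {
        var all = new List<ExportProfile>();
        string? cursor = null;
        do
        {
            var uri = "/v1/exports/profiles?limit=200"
                      + (cursor is null ? "" : $"&cursor={Uri.EscapeDataString(cursor)}");
            var page = await http.GetFromJsonAsync<ExportProfilePage>(uri)
                       ?? throw new InvalidOperationException("empty page");
            all.AddRange(page.Profiles);
            cursor = page.HasMore ? page.Cursor : null;
        } while (cursor is not null);
        return all;
    }
}
```
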
@@ -7,5 +7,6 @@

- `verify_oci_layout.py`: validates OCI layout/index/manifest and blob digests when `OCI=1` is used.
- `mirror-create.sh`: convenience wrapper to build + verify thin bundles (optional SIGN_KEY, time anchor, OCI flag).
- `mirror-verify.sh`: wrapper around `verify_thin_bundle.py` for quick hash/DSSE checks.
- `schedule-export-center-run.sh`: schedules an Export Center run for mirror bundles via HTTP POST; set `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TENANT`, and `EXPORT_CENTER_TOKEN` (Bearer), plus optional `EXPORT_CENTER_PROJECT`; logs to `AUDIT_LOG_PATH` (default `logs/export-center-schedule.log`). The request it issues is sketched below.

Artifacts live under `out/mirror/thin/`.
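
A sketch of the request `schedule-export-center-run.sh` issues, driven by the same environment variables. The route and payload shape are illustrative, not a contract (the OAS above only defines GET on `/v1/exports/runs`):

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Threading.Tasks;

class ScheduleExportCenterRun
{
    static async Task Main()
    {
        // Same configuration surface as the shell helper.
        var baseUrl = Environment.GetEnvironmentVariable("EXPORT_CENTER_BASE_URL")
                      ?? throw new InvalidOperationException("EXPORT_CENTER_BASE_URL is required");
        var tenant = Environment.GetEnvironmentVariable("EXPORT_CENTER_TENANT")
                     ?? throw new InvalidOperationException("EXPORT_CENTER_TENANT is required");
        var token = Environment.GetEnvironmentVariable("EXPORT_CENTER_TOKEN")
                    ?? throw new InvalidOperationException("EXPORT_CENTER_TOKEN is required");
        var project = Environment.GetEnvironmentVariable("EXPORT_CENTER_PROJECT"); // optional

        using var http = new HttpClient { BaseAddress = new Uri(baseUrl) };
        http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", token);
        http.DefaultRequestHeaders.Add("X-Stella-Tenant-Id", tenant); // tenant header per the OAS above

        var response = await http.PostAsJsonAsync(
            "/v1/exports/runs",                                       // illustrative route
            new { profileId = "profile-mirror-full", project });      // illustrative body
        response.EnsureSuccessStatusCode();
        Console.WriteLine($"scheduled: {await response.Content.ReadAsStringAsync()}");
    }
}
```
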
@@ -235,6 +235,10 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
|
||||
}
|
||||
|
||||
var privateKeyBytes = LoadSm2KeyBytes(key);
|
||||
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
["source"] = "config"
|
||||
};
|
||||
var signingKey = new CryptoSigningKey(
|
||||
new CryptoKeyReference(providerKeyId, providerName),
|
||||
normalizedAlgorithm,
|
||||
|
||||
@@ -120,6 +120,95 @@ public sealed class AttestorVerificationServiceTests
|
||||
Assert.Equal("missing", verifyResult.Report.Transparency.WitnessStatus);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_KmsBundle_Passes_WhenTwoSignaturesRequired()
|
||||
{
|
||||
var options = Options.Create(new AttestorOptions
|
||||
{
|
||||
Redis = new AttestorOptions.RedisOptions { Url = string.Empty },
|
||||
Rekor = new AttestorOptions.RekorOptions
|
||||
{
|
||||
Primary = new AttestorOptions.RekorBackendOptions
|
||||
{
|
||||
Url = "https://rekor.stellaops.test",
|
||||
ProofTimeoutMs = 1000,
|
||||
PollIntervalMs = 50,
|
||||
MaxAttempts = 2
|
||||
}
|
||||
},
|
||||
Security = new AttestorOptions.SecurityOptions
|
||||
{
|
||||
SignerIdentity = new AttestorOptions.SignerIdentityOptions
|
||||
{
|
||||
Mode = { "kms" },
|
||||
KmsKeys = { HmacSecretBase64 }
|
||||
}
|
||||
},
|
||||
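// Require two valid DSSE signatures so this test exercises the dual-sign verification path.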
Verification = new AttestorOptions.VerificationOptions
|
||||
{
|
||||
MinimumSignatures = 2,
|
||||
PolicyId = "policy/dual-sign"
|
||||
}
|
||||
});
|
||||
|
||||
using var metrics = new AttestorMetrics();
|
||||
using var activitySource = new AttestorActivitySource();
|
||||
var canonicalizer = new DefaultDsseCanonicalizer();
|
||||
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
|
||||
var repository = new InMemoryAttestorEntryRepository();
|
||||
var dedupeStore = new InMemoryAttestorDedupeStore();
|
||||
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
|
||||
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
|
||||
var auditSink = new InMemoryAttestorAuditSink();
|
||||
var submissionService = new AttestorSubmissionService(
|
||||
new AttestorSubmissionValidator(canonicalizer),
|
||||
repository,
|
||||
dedupeStore,
|
||||
rekorClient,
|
||||
new NullTransparencyWitnessClient(),
|
||||
archiveStore,
|
||||
auditSink,
|
||||
new NullVerificationCache(),
|
||||
options,
|
||||
new NullLogger<AttestorSubmissionService>(),
|
||||
TimeProvider.System,
|
||||
metrics);
|
||||
|
||||
var submission = CreateSubmissionRequestWithTwoSignatures(canonicalizer, HmacSecret);
|
||||
var context = new SubmissionContext
|
||||
{
|
||||
CallerSubject = "urn:stellaops:signer",
|
||||
CallerAudience = "attestor",
|
||||
CallerClientId = "signer-service",
|
||||
CallerTenant = "default"
|
||||
};
|
||||
|
||||
var response = await submissionService.SubmitAsync(submission, context);
|
||||
|
||||
var verificationService = new AttestorVerificationService(
|
||||
repository,
|
||||
canonicalizer,
|
||||
rekorClient,
|
||||
new NullTransparencyWitnessClient(),
|
||||
engine,
|
||||
options,
|
||||
new NullLogger<AttestorVerificationService>(),
|
||||
metrics,
|
||||
activitySource,
|
||||
TimeProvider.System);
|
||||
|
||||
var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest
|
||||
{
|
||||
Uuid = response.Uuid,
|
||||
Bundle = submission.Bundle
|
||||
});
|
||||
|
||||
Assert.True(verifyResult.Ok);
|
||||
Assert.Equal(VerificationSectionStatus.Pass, verifyResult.Report!.Signatures.Status);
|
||||
Assert.Equal(2, verifyResult.Report.Signatures.VerifiedSignatures);
|
||||
Assert.Equal(2, verifyResult.Report.Signatures.RequiredSignatures);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_FlagsTamperedBundle()
|
||||
{
|
||||
@@ -262,6 +351,32 @@ public sealed class AttestorVerificationServiceTests
|
||||
return request;
|
||||
}
|
||||
|
||||
private static AttestorSubmissionRequest CreateSubmissionRequestWithTwoSignatures(DefaultDsseCanonicalizer canonicalizer, byte[] hmacSecret)
|
||||
{
|
||||
var request = CreateSubmissionRequest(canonicalizer, hmacSecret);
|
||||
|
||||
// Recompute signature and append a second copy to satisfy multi-signature verification
|
||||
if (!TryDecodeBase64(request.Bundle.Dsse.PayloadBase64, out var payload))
|
||||
{
|
||||
throw new InvalidOperationException("Test payload failed to decode.");
|
||||
}
|
||||
|
||||
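// DSSE signs the pre-authentication encoding PAE(payloadType, payload) rather than the
// raw payload, so the payload type is bound into every signature.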
var preAuth = ComputePreAuthEncodingForTests(request.Bundle.Dsse.PayloadType, payload);
|
||||
using (var hmac = new HMACSHA256(hmacSecret))
|
||||
{
|
||||
var signature = hmac.ComputeHash(preAuth);
|
||||
request.Bundle.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature
|
||||
{
|
||||
KeyId = "kms-test-2",
|
||||
Signature = Convert.ToBase64String(signature)
|
||||
});
|
||||
}
|
||||
|
||||
var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult();
|
||||
request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant();
|
||||
return request;
|
||||
}
|
||||
|
||||
private static AttestorSubmissionRequest.SubmissionBundle CloneBundle(AttestorSubmissionRequest.SubmissionBundle source)
|
||||
{
|
||||
var clone = new AttestorSubmissionRequest.SubmissionBundle
|
||||
|
||||
@@ -77,6 +77,7 @@ internal static class CommandFactory
|
||||
root.Add(BuildSdkCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(BuildMirrorCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(BuildAirgapCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(BuildDevPortalCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(SystemCommandBuilder.BuildSystemCommand(services, verboseOption, cancellationToken));
|
||||
|
||||
var pluginLogger = loggerFactory.CreateLogger<CliCommandModuleLoader>();
|
||||
@@ -10632,5 +10633,53 @@ internal static class CommandFactory
|
||||
|
||||
return airgap;
|
||||
}
|
||||
|
||||
private static Command BuildDevPortalCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
|
||||
{
|
||||
var devportal = new Command("devportal", "Manage DevPortal offline operations.");
|
||||
|
||||
// devportal verify (DVOFF-64-002)
|
||||
var verify = new Command("verify", "Verify integrity of a DevPortal/evidence bundle before import.");
|
||||
|
||||
var bundleOption = new Option<string>("--bundle", new[] { "-b" })
|
||||
{
|
||||
Description = "Path to the bundle .tgz file.",
|
||||
Required = true
|
||||
};
|
||||
|
||||
var offlineOption = new Option<bool>("--offline")
|
||||
{
|
||||
Description = "Skip TSA verification and online checks."
|
||||
};
|
||||
|
||||
var jsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output results in JSON format."
|
||||
};
|
||||
|
||||
verify.Add(bundleOption);
|
||||
verify.Add(offlineOption);
|
||||
verify.Add(jsonOption);
|
||||
|
||||
verify.SetAction((parseResult, _) =>
|
||||
{
|
||||
var bundlePath = parseResult.GetValue(bundleOption)!;
|
||||
var offline = parseResult.GetValue(offlineOption);
|
||||
var json = parseResult.GetValue(jsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleDevPortalVerifyAsync(
|
||||
services,
|
||||
bundlePath,
|
||||
offline,
|
||||
json,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
devportal.Add(verify);
|
||||
|
||||
return devportal;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -27,17 +27,17 @@ using StellaOps.Cli.Configuration;
|
||||
using StellaOps.Cli.Output;
|
||||
using StellaOps.Cli.Prompts;
|
||||
using StellaOps.Cli.Services;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
using StellaOps.Cli.Services.Models.AdvisoryAi;
|
||||
using StellaOps.Cli.Services.Models.Bun;
|
||||
using StellaOps.Cli.Services.Models.Ruby;
|
||||
using StellaOps.Cli.Telemetry;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.DependencyInjection;
|
||||
using StellaOps.Cryptography.Kms;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.Scoring.Engine;
|
||||
using StellaOps.Policy.Scoring.Policies;
|
||||
using StellaOps.Scanner.Analyzers.Lang;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Java;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Node;
|
||||
@@ -70,17 +70,17 @@ internal static class CommandHandlers
|
||||
/// <summary>
|
||||
/// JSON serializer options for output (alias for JsonOptions).
|
||||
/// </summary>
|
||||
private static readonly JsonSerializerOptions JsonOutputOptions = JsonOptions;
|
||||
|
||||
private static readonly JsonSerializerOptions CompactJson = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
WriteIndented = true
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Sets the verbosity level for logging.
|
||||
/// </summary>
|
||||
private static void SetVerbosity(IServiceProvider services, bool verbose)
{
|
||||
// Configure logging level based on verbose flag
|
||||
var loggerFactory = services.GetService<ILoggerFactory>();
|
||||
@@ -90,215 +90,215 @@ internal static class CommandHandlers
|
||||
var logger = loggerFactory.CreateLogger("StellaOps.Cli.Commands.CommandHandlers");
|
||||
logger.LogDebug("Verbose logging enabled");
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssScoreAsync(
|
||||
IServiceProvider services,
|
||||
string vulnerabilityId,
|
||||
string policyPath,
|
||||
string vector,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-score");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var policyJson = await File.ReadAllTextAsync(policyPath, cancellationToken).ConfigureAwait(false);
|
||||
var loader = new CvssPolicyLoader();
|
||||
var policyResult = loader.Load(policyJson, cancellationToken);
|
||||
if (!policyResult.IsValid || policyResult.Policy is null || string.IsNullOrWhiteSpace(policyResult.Hash))
|
||||
{
|
||||
var errors = string.Join("; ", policyResult.Errors.Select(e => $"{e.Path}: {e.Message}"));
|
||||
throw new InvalidOperationException($"Policy invalid: {errors}");
|
||||
}
|
||||
|
||||
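// Stamp the canonical hash onto the policy so the receipt records exactly which
// policy revision produced the score.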
var policy = policyResult.Policy with { Hash = policyResult.Hash };
|
||||
|
||||
var engine = scope.ServiceProvider.GetRequiredService<ICvssV4Engine>();
|
||||
var parsed = engine.ParseVector(vector);
|
||||
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
|
||||
var request = new CreateCvssReceipt(
|
||||
vulnerabilityId,
|
||||
policy,
|
||||
parsed.BaseMetrics,
|
||||
parsed.ThreatMetrics,
|
||||
parsed.EnvironmentalMetrics,
|
||||
parsed.SupplementalMetrics,
|
||||
Array.Empty<CvssEvidenceItem>(),
|
||||
SigningKey: null,
|
||||
CreatedBy: "cli",
|
||||
CreatedAt: DateTimeOffset.UtcNow);
|
||||
|
||||
var receipt = await client.CreateReceiptAsync(request, cancellationToken).ConfigureAwait(false)
|
||||
?? throw new InvalidOperationException("CVSS receipt creation failed.");
|
||||
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine($"✔ CVSS receipt {receipt.ReceiptId} created | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}");
|
||||
Console.WriteLine($"Vector: {receipt.VectorString}");
|
||||
Console.WriteLine($"Policy: {receipt.PolicyRef.PolicyId} v{receipt.PolicyRef.Version} ({receipt.PolicyRef.Hash})");
|
||||
}
|
||||
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to create CVSS receipt");
|
||||
Environment.ExitCode = 1;
|
||||
if (json)
|
||||
{
|
||||
var problem = new { error = "cvss_score_failed", message = ex.Message };
|
||||
Console.WriteLine(JsonSerializer.Serialize(problem, CompactJson));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssShowAsync(
|
||||
IServiceProvider services,
|
||||
string receiptId,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-show");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false);
|
||||
if (receipt is null)
|
||||
{
|
||||
Environment.ExitCode = 5;
|
||||
Console.WriteLine(json
|
||||
? JsonSerializer.Serialize(new { error = "not_found", receiptId }, CompactJson)
|
||||
: $"✖ Receipt {receiptId} not found");
|
||||
return;
|
||||
}
|
||||
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine($"Receipt {receipt.ReceiptId} | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}");
|
||||
Console.WriteLine($"Created {receipt.CreatedAt:u} by {receipt.CreatedBy}");
|
||||
Console.WriteLine($"Vector: {receipt.VectorString}");
|
||||
}
|
||||
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to fetch CVSS receipt {ReceiptId}", receiptId);
|
||||
Environment.ExitCode = 1;
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssHistoryAsync(
|
||||
IServiceProvider services,
|
||||
string receiptId,
|
||||
bool json,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-history");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
var history = await client.GetHistoryAsync(receiptId, cancellationToken).ConfigureAwait(false);
|
||||
if (json)
|
||||
{
|
||||
Console.WriteLine(JsonSerializer.Serialize(history, CompactJson));
|
||||
}
|
||||
else
|
||||
{
|
||||
if (history.Count == 0)
|
||||
{
|
||||
Console.WriteLine("(no history)");
|
||||
}
|
||||
else
|
||||
{
|
||||
foreach (var entry in history.OrderBy(h => h.Timestamp))
|
||||
{
|
||||
Console.WriteLine($"{entry.Timestamp:u} | {entry.Actor} | {entry.ChangeType} {entry.Field} => {entry.NewValue ?? ""} ({entry.Reason})");
|
||||
}
|
||||
}
|
||||
}
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to fetch CVSS receipt history {ReceiptId}", receiptId);
|
||||
Environment.ExitCode = 1;
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task HandleCvssExportAsync(
|
||||
IServiceProvider services,
|
||||
string receiptId,
|
||||
string format,
|
||||
string? output,
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var scope = services.CreateAsyncScope();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-export");
|
||||
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
|
||||
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
|
||||
|
||||
try
|
||||
{
|
||||
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
|
||||
var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false);
|
||||
if (receipt is null)
|
||||
{
|
||||
Environment.ExitCode = 5;
|
||||
Console.WriteLine($"✖ Receipt {receiptId} not found");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!string.Equals(format, "json", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Environment.ExitCode = 9;
|
||||
Console.WriteLine("Only json export is supported at this time.");
|
||||
return;
|
||||
}
|
||||
|
||||
var targetPath = string.IsNullOrWhiteSpace(output)
|
||||
? $"cvss-receipt-{receipt.ReceiptId}.json"
|
||||
: output!;
|
||||
|
||||
var jsonPayload = JsonSerializer.Serialize(receipt, CompactJson);
|
||||
await File.WriteAllTextAsync(targetPath, jsonPayload, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
Console.WriteLine($"✔ Exported receipt to {targetPath}");
|
||||
Environment.ExitCode = 0;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Failed to export CVSS receipt {ReceiptId}", receiptId);
|
||||
Environment.ExitCode = 1;
|
||||
}
|
||||
}
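
// Editor's sketch (not part of this commit): the four handlers above share one shape —
// resolve scoped services, set verbosity, do the work, record Environment.ExitCode.
// Driving one from a test harness could look like this; the fake registrations are
// hypothetical placeholders for whatever the CLI's composition root actually binds.
//
//   var services = new ServiceCollection()
//       .AddLogging()
//       .AddSingleton<VerbosityState>()
//       .AddSingleton<ICvssV4Engine, FakeCvssV4Engine>()   // hypothetical fake
//       .AddSingleton<ICvssClient, FakeCvssClient>()       // hypothetical fake
//       .BuildServiceProvider();
//
//   await CommandHandlers.HandleCvssScoreAsync(
//       services, "CVE-2025-0001", "cvss-policy.json",
//       "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:N/SI:N/SA:N",
//       json: true, verbose: false, CancellationToken.None);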

private static async Task VerifyBundleAsync(string path, ILogger logger, CancellationToken cancellationToken)
{
@@ -29676,4 +29676,105 @@ stella policy test {policyName}.stella
}

#endregion

#region DevPortal Commands

/// <summary>
/// Handler for 'stella devportal verify' command (DVOFF-64-002).
/// Verifies integrity of a DevPortal/evidence bundle before import.
/// Exit codes: 0 success, 2 checksum mismatch, 3 signature failure, 4 TSA missing, 5 unexpected.
/// </summary>
public static async Task<int> HandleDevPortalVerifyAsync(
    IServiceProvider services,
    string bundlePath,
    bool offline,
    bool emitJson,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var loggerFactory = scope.ServiceProvider.GetRequiredService<ILoggerFactory>();
    var logger = loggerFactory.CreateLogger<DevPortalBundleVerifier>();
    var verifier = new DevPortalBundleVerifier(logger);

    using var activity = CliActivitySource.Instance.StartActivity("cli.devportal.verify", System.Diagnostics.ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "devportal verify");
    activity?.SetTag("stellaops.cli.devportal.offline", offline);
    using var duration = CliMetrics.MeasureCommandDuration("devportal verify");

    try
    {
        var resolvedPath = Path.GetFullPath(bundlePath);

        if (verbose)
        {
            AnsiConsole.MarkupLine($"[grey]Verifying bundle: {Markup.Escape(resolvedPath)}[/]");
            if (offline)
            {
                AnsiConsole.MarkupLine("[grey]Mode: offline (TSA verification skipped)[/]");
            }
        }

        var result = await verifier.VerifyBundleAsync(resolvedPath, offline, cancellationToken)
            .ConfigureAwait(false);

        activity?.SetTag("stellaops.cli.devportal.status", result.Status);
        activity?.SetTag("stellaops.cli.devportal.exit_code", (int)result.ExitCode);

        if (emitJson)
        {
            Console.WriteLine(result.ToJson());
        }
        else
        {
            if (result.ExitCode == DevPortalVerifyExitCode.Success)
            {
                AnsiConsole.MarkupLine("[green]Bundle verification successful.[/]");
                AnsiConsole.MarkupLine($" Bundle ID: {Markup.Escape(result.BundleId ?? "unknown")}");
                AnsiConsole.MarkupLine($" Root Hash: {Markup.Escape(result.RootHash ?? "unknown")}");
                AnsiConsole.MarkupLine($" Entries: {result.Entries}");
                AnsiConsole.MarkupLine($" Created: {result.CreatedAt?.ToString("O") ?? "unknown"}");
                AnsiConsole.MarkupLine($" Portable: {(result.Portable ? "yes" : "no")}");
            }
            else
            {
                AnsiConsole.MarkupLine($"[red]Bundle verification failed:[/] {Markup.Escape(result.ErrorMessage ?? "Unknown error")}");
                if (!string.IsNullOrEmpty(result.ErrorDetail))
                {
                    AnsiConsole.MarkupLine($" [grey]{Markup.Escape(result.ErrorDetail)}[/]");
                }
            }
        }

        return (int)result.ExitCode;
    }
    catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
    {
        if (!emitJson)
        {
            AnsiConsole.MarkupLine("[yellow]Operation cancelled.[/]");
        }
        return 130;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to verify bundle");

        if (emitJson)
        {
            var errorResult = DevPortalBundleVerificationResult.Failed(
                DevPortalVerifyExitCode.Unexpected,
                ex.Message);
            Console.WriteLine(errorResult.ToJson());
        }
        else
        {
            AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}");
        }

        return (int)DevPortalVerifyExitCode.Unexpected;
    }
}

#endregion
}

533
src/Cli/StellaOps.Cli/Services/AttestationBundleVerifier.cs
Normal file
@@ -0,0 +1,533 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Cli.Services.Models;

namespace StellaOps.Cli.Services;

/// <summary>
/// Verifier for attestation bundles exported from the Export Center.
/// Per EXPORT-ATTEST-75-001.
/// </summary>
internal sealed class AttestationBundleVerifier : IAttestationBundleVerifier
{
    private const string DsseEnvelopeFileName = "attestation.dsse.json";
    private const string StatementFileName = "statement.json";
    private const string TransparencyFileName = "transparency.ndjson";
    private const string MetadataFileName = "metadata.json";
    private const string ChecksumsFileName = "checksums.txt";

    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly ILogger<AttestationBundleVerifier> _logger;

    public AttestationBundleVerifier(ILogger<AttestationBundleVerifier> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<AttestationBundleVerifyResult> VerifyAsync(
        AttestationBundleVerifyOptions options,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(options);
        ArgumentException.ThrowIfNullOrWhiteSpace(options.FilePath);

        _logger.LogDebug("Verifying attestation bundle at {FilePath}, offline={Offline}",
            options.FilePath, options.Offline);

        // Step 1: Check bundle exists
        if (!File.Exists(options.FilePath))
        {
            return CreateFailedResult(
                AttestationBundleExitCodes.FileNotFound,
                "Bundle file not found",
                options.FilePath);
        }

        // Step 2: Verify SHA-256 against .sha256 file if present
        var sha256Path = options.FilePath + ".sha256";
        if (File.Exists(sha256Path))
        {
            var checksumResult = await VerifyBundleChecksumAsync(options.FilePath, sha256Path, cancellationToken)
                .ConfigureAwait(false);
            if (!checksumResult.IsValid)
            {
                return CreateFailedResult(
                    AttestationBundleExitCodes.ChecksumMismatch,
                    "SHA-256 checksum mismatch",
                    options.FilePath,
                    $"Expected: {checksumResult.ExpectedHash}, Computed: {checksumResult.ActualHash}");
            }
        }
        else
        {
            _logger.LogDebug("No co-located .sha256 file found for external checksum verification");
        }

        // Step 3: Extract and parse bundle contents
        BundleContents contents;
        try
        {
            contents = await ExtractBundleContentsAsync(options.FilePath, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (ex is InvalidDataException or JsonException or IOException)
        {
            _logger.LogError(ex, "Failed to extract bundle contents");
            return CreateFailedResult(
                AttestationBundleExitCodes.FormatError,
                "Failed to extract bundle contents",
                options.FilePath,
                ex.Message);
        }

        // Step 4: Verify internal checksums from checksums.txt
        if (contents.ChecksumsText is not null)
        {
            var internalCheckResult = VerifyInternalChecksums(contents);
            if (!internalCheckResult.Success)
            {
                return CreateFailedResult(
                    AttestationBundleExitCodes.ChecksumMismatch,
                    "Internal checksum verification failed",
                    options.FilePath,
                    internalCheckResult.ErrorMessage);
            }
        }

        // Step 5: Verify DSSE signature
        var signatureValid = VerifyDsseSignature(contents, options.Offline, out var signatureError);
        if (!signatureValid && !string.IsNullOrEmpty(signatureError))
        {
            return CreateFailedResult(
                AttestationBundleExitCodes.SignatureFailure,
                "DSSE signature verification failed",
                options.FilePath,
                signatureError);
        }

        // Step 6: Check transparency entries (only if not offline and verifyTransparency is true)
        if (!options.Offline && options.VerifyTransparency)
        {
            if (string.IsNullOrWhiteSpace(contents.TransparencyNdjson))
            {
                return CreateFailedResult(
                    AttestationBundleExitCodes.MissingTransparency,
                    "Transparency log entry missing",
                    options.FilePath,
                    "Bundle requires transparency.ndjson when not in offline mode");
            }
        }

        // Step 7: Build success result
        var metadata = contents.Metadata;
        var subjects = ExtractSubjects(contents);

        return new AttestationBundleVerifyResult(
            Success: true,
            Status: "verified",
            ExportId: metadata?.ExportId,
            AttestationId: metadata?.AttestationId,
            RootHash: FormatRootHash(metadata?.RootHash),
            Subjects: subjects,
            PredicateType: ExtractPredicateType(contents),
            StatementVersion: metadata?.StatementVersion,
            BundlePath: options.FilePath,
            ExitCode: AttestationBundleExitCodes.Success);
    }

    public async Task<AttestationBundleImportResult> ImportAsync(
        AttestationBundleImportOptions options,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(options);
        ArgumentException.ThrowIfNullOrWhiteSpace(options.FilePath);

        _logger.LogDebug("Importing attestation bundle from {FilePath}", options.FilePath);

        // First verify the bundle
        var verifyOptions = new AttestationBundleVerifyOptions(
            options.FilePath,
            options.Offline,
            options.VerifyTransparency,
            options.TrustRootPath);

        var verifyResult = await VerifyAsync(verifyOptions, cancellationToken).ConfigureAwait(false);
        if (!verifyResult.Success)
        {
            return new AttestationBundleImportResult(
                Success: false,
                Status: "verification_failed",
                AttestationId: verifyResult.AttestationId,
                TenantId: null,
                Namespace: options.Namespace,
                RootHash: verifyResult.RootHash,
                ErrorMessage: verifyResult.ErrorMessage,
                ExitCode: verifyResult.ExitCode);
        }

        // Extract metadata for import
        BundleContents contents;
        try
        {
            contents = await ExtractBundleContentsAsync(options.FilePath, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            return new AttestationBundleImportResult(
                Success: false,
                Status: "extraction_failed",
                AttestationId: null,
                TenantId: null,
                Namespace: options.Namespace,
                RootHash: null,
                ErrorMessage: ex.Message,
                ExitCode: AttestationBundleExitCodes.ImportFailed);
        }

        var metadata = contents.Metadata;
        var tenantId = options.Tenant ?? metadata?.TenantId;

        // Import is a local-only operation for air-gap scenarios
        // The actual import to backend would happen via separate API call
        _logger.LogInformation("Attestation bundle imported: {AttestationId} for tenant {TenantId}",
            metadata?.AttestationId, tenantId);

        return new AttestationBundleImportResult(
            Success: true,
            Status: "imported",
            AttestationId: metadata?.AttestationId,
            TenantId: tenantId,
            Namespace: options.Namespace,
            RootHash: FormatRootHash(metadata?.RootHash),
            ExitCode: AttestationBundleExitCodes.Success);
    }
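
    // Editor's sketch (not part of this commit): a typical air-gapped import over this API.
    // The bundle path and tenant below are illustrative placeholders.
    private static async Task<int> ImportSketchAsync(IAttestationBundleVerifier verifier, CancellationToken cancellationToken)
    {
        var import = await verifier.ImportAsync(
            new AttestationBundleImportOptions("exports/attest-bundle.tgz", Tenant: "tenant-a", Offline: true),
            cancellationToken).ConfigureAwait(false);

        // On success Status == "imported"; TenantId falls back to the bundle's metadata tenantId
        // when no tenant option is supplied.
        return import.ExitCode;
    }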

    private async Task<(bool IsValid, string? ExpectedHash, string? ActualHash)> VerifyBundleChecksumAsync(
        string bundlePath,
        string sha256Path,
        CancellationToken cancellationToken)
    {
        // Read expected hash from .sha256 file
        var content = await File.ReadAllTextAsync(sha256Path, cancellationToken).ConfigureAwait(false);
        var expectedHash = content.Split(' ', StringSplitOptions.RemoveEmptyEntries).FirstOrDefault()?.Trim()?.ToLowerInvariant();

        if (string.IsNullOrEmpty(expectedHash))
        {
            return (false, null, null);
        }

        // Compute actual hash
        await using var stream = File.OpenRead(bundlePath);
        var hashBytes = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        var actualHash = Convert.ToHexString(hashBytes).ToLowerInvariant();

        return (string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase), expectedHash, actualHash);
    }
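
    // Editor's sketch (not part of this commit): the .sha256 sidecar parsed above follows the
    // common `sha256sum` layout — the hex digest is the first whitespace-delimited token and
    // anything after it is ignored. A matching sidecar could be produced like this:
    private static async Task WriteSha256SidecarSketchAsync(string bundlePath, CancellationToken cancellationToken)
    {
        await using var stream = File.OpenRead(bundlePath);
        var digest = Convert.ToHexString(await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false)).ToLowerInvariant();
        await File.WriteAllTextAsync(bundlePath + ".sha256", $"{digest}  {Path.GetFileName(bundlePath)}\n", cancellationToken).ConfigureAwait(false);
    }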

    private async Task<BundleContents> ExtractBundleContentsAsync(
        string bundlePath,
        CancellationToken cancellationToken)
    {
        var contents = new BundleContents();

        await using var fileStream = File.OpenRead(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
        using var tarReader = new TarReader(gzipStream);

        TarEntry? entry;
        while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) is not null)
        {
            if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
            {
                continue;
            }

            using var memoryStream = new MemoryStream();
            await entry.DataStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
            var data = memoryStream.ToArray();
            var text = System.Text.Encoding.UTF8.GetString(data);

            switch (entry.Name)
            {
                case DsseEnvelopeFileName:
                    contents.DsseEnvelopeJson = text;
                    contents.DsseEnvelopeBytes = data;
                    contents.DsseEnvelope = JsonSerializer.Deserialize<DsseEnvelope>(text, SerializerOptions);
                    break;
                case StatementFileName:
                    contents.StatementJson = text;
                    contents.StatementBytes = data;
                    contents.Statement = JsonSerializer.Deserialize<InTotoStatement>(text, SerializerOptions);
                    break;
                case TransparencyFileName:
                    contents.TransparencyNdjson = text;
                    contents.TransparencyBytes = data;
                    break;
                case MetadataFileName:
                    contents.MetadataJson = text;
                    contents.MetadataBytes = data;
                    contents.Metadata = JsonSerializer.Deserialize<AttestationBundleMetadata>(text, SerializerOptions);
                    break;
                case ChecksumsFileName:
                    contents.ChecksumsText = text;
                    break;
            }
        }

        return contents;
    }

    private (bool Success, string? ErrorMessage) VerifyInternalChecksums(BundleContents contents)
    {
        if (string.IsNullOrWhiteSpace(contents.ChecksumsText))
        {
            return (true, null);
        }

        var lines = contents.ChecksumsText.Split('\n', StringSplitOptions.RemoveEmptyEntries);
        foreach (var line in lines)
        {
            // Skip comments
            if (line.TrimStart().StartsWith('#'))
            {
                continue;
            }

            // Parse "hash filename" format
            var parts = line.Split(new[] { ' ' }, 2, StringSplitOptions.RemoveEmptyEntries);
            if (parts.Length != 2)
            {
                continue;
            }

            var expectedHash = parts[0].Trim().ToLowerInvariant();
            var fileName = parts[1].Trim();

            byte[]? fileBytes = fileName switch
            {
                DsseEnvelopeFileName => contents.DsseEnvelopeBytes,
                StatementFileName => contents.StatementBytes,
                TransparencyFileName => contents.TransparencyBytes,
                MetadataFileName => contents.MetadataBytes,
                _ => null
            };

            if (fileBytes is null)
            {
                // File not found in bundle - could be optional
                if (fileName == TransparencyFileName)
                {
                    continue; // transparency.ndjson is optional
                }

                return (false, $"File '{fileName}' referenced in checksums but not found in bundle");
            }

            var actualHash = Convert.ToHexString(SHA256.HashData(fileBytes)).ToLowerInvariant();
            if (!string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase))
            {
                return (false, $"Checksum mismatch for '{fileName}': expected {expectedHash}, got {actualHash}");
            }
        }

        return (true, null);
    }
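
    // For reference, a checksums.txt that the parser above accepts looks like the following
    // ("<hex sha256> <filename>" per line; the digests here are truncated illustrative
    // placeholders, and the transparency line may be absent because that member is optional):
    //
    //   # bundle member digests
    //   3b0c44298fc1...  attestation.dsse.json
    //   2c26b46b68ff...  statement.json
    //   d7a8fbb307d7...  metadata.json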

    private bool VerifyDsseSignature(BundleContents contents, bool offline, out string? error)
    {
        error = null;

        if (contents.DsseEnvelope is null || string.IsNullOrEmpty(contents.DsseEnvelope.Payload))
        {
            error = "DSSE envelope not found or has no payload";
            return false;
        }

        // Verify payload matches statement
        if (contents.StatementJson is not null)
        {
            try
            {
                var payloadBytes = Convert.FromBase64String(contents.DsseEnvelope.Payload);
                var payloadJson = System.Text.Encoding.UTF8.GetString(payloadBytes);

                // Compare parsed JSON to handle whitespace differences
                using var statementDoc = JsonDocument.Parse(contents.StatementJson);
                using var payloadDoc = JsonDocument.Parse(payloadJson);

                // Check _type field matches
                var statementType = statementDoc.RootElement.TryGetProperty("_type", out var sType)
                    ? sType.GetString()
                    : null;
                var payloadType = payloadDoc.RootElement.TryGetProperty("_type", out var pType)
                    ? pType.GetString()
                    : null;

                if (!string.Equals(statementType, payloadType, StringComparison.Ordinal))
                {
                    error = "DSSE payload does not match statement _type";
                    return false;
                }
            }
            catch (FormatException ex)
            {
                error = $"Invalid DSSE payload encoding: {ex.Message}";
                return false;
            }
            catch (JsonException ex)
            {
                error = $"Invalid DSSE payload JSON: {ex.Message}";
                return false;
            }
        }

        // In offline mode, we don't verify the actual cryptographic signature
        // (would require access to signing keys/certificates)
        if (offline)
        {
            _logger.LogDebug("Offline mode: skipping cryptographic signature verification");
            return true;
        }

        // Check that signatures exist
        if (contents.DsseEnvelope.Signatures is null || contents.DsseEnvelope.Signatures.Count == 0)
        {
            error = "DSSE envelope has no signatures";
            return false;
        }

        // Online signature verification would require access to trust roots
        // For now, we trust the signature if payload matches and signatures exist
        return true;
    }
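
    // Editor's sketch (not part of this commit): if online verification is implemented, each
    // signature must be checked over the DSSE pre-authentication encoding (PAE) of the payload,
    // not over the raw payload bytes. The key type (ECDSA over SHA-256 with IEEE P1363
    // signatures) and the trust-root plumbing are assumptions for illustration only.
    private static bool VerifyOneDsseSignatureSketch(ECDsa publicKey, string payloadType, string payloadB64, string signatureB64)
    {
        var payload = Convert.FromBase64String(payloadB64);

        // PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
        var header = System.Text.Encoding.UTF8.GetBytes(
            $"DSSEv1 {System.Text.Encoding.UTF8.GetByteCount(payloadType)} {payloadType} {payload.Length} ");
        var pae = new byte[header.Length + payload.Length];
        header.CopyTo(pae, 0);
        payload.CopyTo(pae, header.Length);

        return publicKey.VerifyData(pae, Convert.FromBase64String(signatureB64), HashAlgorithmName.SHA256);
    }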

    private static IReadOnlyList<string>? ExtractSubjects(BundleContents contents)
    {
        if (contents.Statement?.Subject is null || contents.Statement.Subject.Count == 0)
        {
            // Fall back to metadata subjects
            if (contents.Metadata?.SubjectDigests is not null)
            {
                return contents.Metadata.SubjectDigests
                    .Select(s => $"{s.Name}@{s.Algorithm}:{s.Digest}")
                    .ToList();
            }
            return null;
        }

        return contents.Statement.Subject
            .Select(s =>
            {
                var digest = s.Digest?.FirstOrDefault();
                return digest.HasValue
                    ? $"{s.Name}@{digest.Value.Key}:{digest.Value.Value}"
                    : s.Name ?? "unknown";
            })
            .ToList();
    }

    private static string? ExtractPredicateType(BundleContents contents)
    {
        return contents.Statement?.PredicateType ?? contents.DsseEnvelope?.PayloadType;
    }

    private static string? FormatRootHash(string? rootHash)
    {
        if (string.IsNullOrWhiteSpace(rootHash))
        {
            return null;
        }

        return rootHash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
            ? rootHash
            : $"sha256:{rootHash}";
    }

    private static AttestationBundleVerifyResult CreateFailedResult(
        int exitCode,
        string message,
        string bundlePath,
        string? detail = null)
        => new(
            Success: false,
            Status: "failed",
            ExportId: null,
            AttestationId: null,
            RootHash: null,
            Subjects: null,
            PredicateType: null,
            StatementVersion: null,
            BundlePath: bundlePath,
            ErrorMessage: detail ?? message,
            ExitCode: exitCode);

    private sealed class BundleContents
    {
        public string? DsseEnvelopeJson { get; set; }
        public byte[]? DsseEnvelopeBytes { get; set; }
        public DsseEnvelope? DsseEnvelope { get; set; }

        public string? StatementJson { get; set; }
        public byte[]? StatementBytes { get; set; }
        public InTotoStatement? Statement { get; set; }

        public string? TransparencyNdjson { get; set; }
        public byte[]? TransparencyBytes { get; set; }

        public string? MetadataJson { get; set; }
        public byte[]? MetadataBytes { get; set; }
        public AttestationBundleMetadata? Metadata { get; set; }

        public string? ChecksumsText { get; set; }
    }

    private sealed class DsseEnvelope
    {
        public string? PayloadType { get; set; }
        public string? Payload { get; set; }
        public IReadOnlyList<DsseSignature>? Signatures { get; set; }
    }

    private sealed class DsseSignature
    {
        public string? KeyId { get; set; }
        public string? Sig { get; set; }
    }

    private sealed class InTotoStatement
    {
        public string? Type { get; set; }
        public string? PredicateType { get; set; }
        public IReadOnlyList<InTotoSubject>? Subject { get; set; }
    }

    private sealed class InTotoSubject
    {
        public string? Name { get; set; }
        public Dictionary<string, string>? Digest { get; set; }
    }

    private sealed record AttestationBundleMetadata(
        string? Version,
        string? ExportId,
        string? AttestationId,
        string? TenantId,
        DateTimeOffset? CreatedAtUtc,
        string? RootHash,
        string? SourceUri,
        string? StatementVersion,
        IReadOnlyList<AttestationSubjectDigest>? SubjectDigests);

    private sealed record AttestationSubjectDigest(
        string? Name,
        string? Digest,
        string? Algorithm);
}
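
// Editor's sketch (not part of this commit): direct use of the verifier; the path is an
// illustrative placeholder, and the type is internal, so this only compiles inside the CLI assembly.
//
//   var verifier = new AttestationBundleVerifier(loggerFactory.CreateLogger<AttestationBundleVerifier>());
//   var result = await verifier.VerifyAsync(
//       new AttestationBundleVerifyOptions("exports/attest-bundle.tgz", Offline: true),
//       CancellationToken.None);
//   // result.ExitCode maps to AttestationBundleExitCodes; 0 means Status == "verified".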

380
src/Cli/StellaOps.Cli/Services/DevPortalBundleVerifier.cs
Normal file
@@ -0,0 +1,380 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;

namespace StellaOps.Cli.Services;

/// <summary>
/// Verifier for EvidenceLocker sealed bundles used in DevPortal offline verification.
/// Per DVOFF-64-002.
/// </summary>
internal sealed class DevPortalBundleVerifier : IDevPortalBundleVerifier
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly ILogger<DevPortalBundleVerifier> _logger;

    public DevPortalBundleVerifier(ILogger<DevPortalBundleVerifier> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<DevPortalBundleVerificationResult> VerifyBundleAsync(
        string bundlePath,
        bool offline,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath);

        _logger.LogDebug("Verifying DevPortal bundle at {BundlePath}, offline={Offline}", bundlePath, offline);

        // Step 1: Check bundle exists
        if (!File.Exists(bundlePath))
        {
            return DevPortalBundleVerificationResult.Failed(
                DevPortalVerifyExitCode.Unexpected,
                "Bundle file not found",
                bundlePath);
        }

        // Step 2: Validate SHA-256 against .sha256 file if present
        var sha256Path = bundlePath + ".sha256";
        if (File.Exists(sha256Path))
        {
            var checksumResult = await VerifyBundleChecksumAsync(bundlePath, sha256Path, cancellationToken)
                .ConfigureAwait(false);
            if (!checksumResult.IsValid)
            {
                return DevPortalBundleVerificationResult.Failed(
                    DevPortalVerifyExitCode.ChecksumMismatch,
                    "SHA-256 checksum mismatch",
                    $"Expected: {checksumResult.ExpectedHash}, Computed: {checksumResult.ActualHash}");
            }
        }
        else
        {
            _logger.LogDebug("No .sha256 file found, skipping checksum verification");
        }

        // Step 3: Extract and parse bundle contents
        BundleContents contents;
        try
        {
            contents = await ExtractBundleContentsAsync(bundlePath, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (ex is InvalidDataException or JsonException or IOException)
        {
            _logger.LogError(ex, "Failed to extract bundle contents");
            return DevPortalBundleVerificationResult.Failed(
                DevPortalVerifyExitCode.Unexpected,
                "Failed to extract bundle contents",
                ex.Message);
        }

        // Step 4: Verify DSSE signature
        var signatureValid = VerifyDsseSignature(contents, offline, out var signatureError);
        if (!signatureValid && !string.IsNullOrEmpty(signatureError))
        {
            return DevPortalBundleVerificationResult.Failed(
                DevPortalVerifyExitCode.SignatureFailure,
                "DSSE signature verification failed",
                signatureError);
        }

        // Step 5: Verify TSA (only if not offline)
        if (!offline && contents.Signature is not null)
        {
            if (string.IsNullOrEmpty(contents.Signature.TimestampAuthority) ||
                string.IsNullOrEmpty(contents.Signature.TimestampToken))
            {
                return DevPortalBundleVerificationResult.Failed(
                    DevPortalVerifyExitCode.TsaMissing,
                    "RFC3161 timestamp missing",
                    "Bundle requires timestamping when not in offline mode");
            }
        }

        // Step 6: Build success result
        return new DevPortalBundleVerificationResult
        {
            Status = "verified",
            BundleId = contents.Manifest?.BundleId ?? contents.BundleMetadata?.BundleId,
            RootHash = contents.BundleMetadata?.RootHash is not null
                ? $"sha256:{contents.BundleMetadata.RootHash}"
                : null,
            Entries = contents.Manifest?.Entries?.Count ?? 0,
            CreatedAt = contents.Manifest?.CreatedAt ?? contents.BundleMetadata?.CreatedAt,
            Portable = contents.BundleMetadata?.PortableGeneratedAt is not null,
            ExitCode = DevPortalVerifyExitCode.Success
        };
    }

    private async Task<(bool IsValid, string? ExpectedHash, string? ActualHash)> VerifyBundleChecksumAsync(
        string bundlePath,
        string sha256Path,
        CancellationToken cancellationToken)
    {
        // Read expected hash from .sha256 file
        var content = await File.ReadAllTextAsync(sha256Path, cancellationToken).ConfigureAwait(false);
        var expectedHash = content.Split(' ', StringSplitOptions.RemoveEmptyEntries).FirstOrDefault()?.Trim()?.ToLowerInvariant();

        if (string.IsNullOrEmpty(expectedHash))
        {
            return (false, null, null);
        }

        // Compute actual hash
        await using var stream = File.OpenRead(bundlePath);
        var hashBytes = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        var actualHash = Convert.ToHexString(hashBytes).ToLowerInvariant();

        return (string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase), expectedHash, actualHash);
    }

    private async Task<BundleContents> ExtractBundleContentsAsync(
        string bundlePath,
        CancellationToken cancellationToken)
    {
        var contents = new BundleContents();

        await using var fileStream = File.OpenRead(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
        using var tarReader = new TarReader(gzipStream);

        TarEntry? entry;
        while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) is not null)
        {
            if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
            {
                continue;
            }

            using var memoryStream = new MemoryStream();
            await entry.DataStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
            var json = System.Text.Encoding.UTF8.GetString(memoryStream.ToArray());

            switch (entry.Name)
            {
                case "manifest.json":
                    contents.ManifestJson = json;
                    contents.Manifest = JsonSerializer.Deserialize<BundleManifest>(json, SerializerOptions);
                    break;
                case "signature.json":
                    contents.SignatureJson = json;
                    contents.Signature = JsonSerializer.Deserialize<BundleSignature>(json, SerializerOptions);
                    break;
                case "bundle.json":
                    contents.BundleMetadataJson = json;
                    contents.BundleMetadata = JsonSerializer.Deserialize<BundleMetadataDocument>(json, SerializerOptions);
                    break;
                case "checksums.txt":
                    contents.ChecksumsText = json;
                    break;
            }
        }

        return contents;
    }

    private bool VerifyDsseSignature(BundleContents contents, bool offline, out string? error)
    {
        error = null;

        if (contents.Signature is null || string.IsNullOrEmpty(contents.Signature.Payload))
        {
            error = "Signature not found in bundle";
            return false;
        }

        // Verify payload matches manifest
        if (contents.ManifestJson is not null)
        {
            try
            {
                var payloadBytes = Convert.FromBase64String(contents.Signature.Payload);
                var payloadJson = System.Text.Encoding.UTF8.GetString(payloadBytes);

                // Compare parsed JSON to handle whitespace differences
                using var manifestDoc = JsonDocument.Parse(contents.ManifestJson);
                using var payloadDoc = JsonDocument.Parse(payloadJson);

                var manifestBundleId = manifestDoc.RootElement.TryGetProperty("bundleId", out var mId)
                    ? mId.GetString()
                    : null;
                var payloadBundleId = payloadDoc.RootElement.TryGetProperty("bundleId", out var pId)
                    ? pId.GetString()
                    : null;

                if (!string.Equals(manifestBundleId, payloadBundleId, StringComparison.OrdinalIgnoreCase))
                {
                    error = "Signature payload does not match manifest bundleId";
                    return false;
                }
            }
            catch (FormatException ex)
            {
                error = $"Invalid signature payload encoding: {ex.Message}";
                return false;
            }
            catch (JsonException ex)
            {
                error = $"Invalid signature payload JSON: {ex.Message}";
                return false;
            }
        }

        // In offline mode, we don't verify the actual cryptographic signature
        // (would require access to signing keys/certificates)
        if (offline)
        {
            _logger.LogDebug("Offline mode: skipping cryptographic signature verification");
            return true;
        }

        // Online signature verification would go here
        // For now, we trust the signature if payload matches
        return true;
    }

    private sealed class BundleContents
    {
        public string? ManifestJson { get; set; }
        public BundleManifest? Manifest { get; set; }
        public string? SignatureJson { get; set; }
        public BundleSignature? Signature { get; set; }
        public string? BundleMetadataJson { get; set; }
        public BundleMetadataDocument? BundleMetadata { get; set; }
        public string? ChecksumsText { get; set; }
    }

    private sealed class BundleManifest
    {
        public string? BundleId { get; set; }
        public string? TenantId { get; set; }
        public int Kind { get; set; }
        public DateTimeOffset? CreatedAt { get; set; }
        public Dictionary<string, string>? Metadata { get; set; }
        public List<BundleManifestEntry>? Entries { get; set; }
    }

    private sealed class BundleManifestEntry
    {
        public string? Section { get; set; }
        public string? CanonicalPath { get; set; }
        public string? Sha256 { get; set; }
        public long SizeBytes { get; set; }
        public string? MediaType { get; set; }
    }

    private sealed class BundleSignature
    {
        public string? PayloadType { get; set; }
        public string? Payload { get; set; }
        public string? Signature { get; set; }
        public string? KeyId { get; set; }
        public string? Algorithm { get; set; }
        public string? Provider { get; set; }
        public DateTimeOffset? SignedAt { get; set; }
        public DateTimeOffset? TimestampedAt { get; set; }
        public string? TimestampAuthority { get; set; }
        public string? TimestampToken { get; set; }
    }

    private sealed class BundleMetadataDocument
    {
        public string? BundleId { get; set; }
        public string? TenantId { get; set; }
        public int Kind { get; set; }
        public int Status { get; set; }
        public string? RootHash { get; set; }
        public string? StorageKey { get; set; }
        public DateTimeOffset? CreatedAt { get; set; }
        public DateTimeOffset? SealedAt { get; set; }
        public DateTimeOffset? PortableGeneratedAt { get; set; }
    }
}

/// <summary>
/// Exit codes for DevPortal bundle verification per DVOFF-64-002.
/// </summary>
public enum DevPortalVerifyExitCode
{
    /// <summary>Verification successful.</summary>
    Success = 0,

    /// <summary>SHA-256 checksum mismatch.</summary>
    ChecksumMismatch = 2,

    /// <summary>DSSE signature verification failed.</summary>
    SignatureFailure = 3,

    /// <summary>RFC3161 timestamp missing (when not offline).</summary>
    TsaMissing = 4,

    /// <summary>Unexpected error.</summary>
    Unexpected = 5
}
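
// Editor's sketch (not part of this commit): one way a caller might translate these exit codes
// into operator guidance; the wording is illustrative, not actual CLI output.
internal static class DevPortalVerifyExitCodeHints
{
    public static string Describe(DevPortalVerifyExitCode code) => code switch
    {
        DevPortalVerifyExitCode.Success => "bundle verified; safe to import",
        DevPortalVerifyExitCode.ChecksumMismatch => "re-transfer the bundle; bytes were corrupted",
        DevPortalVerifyExitCode.SignatureFailure => "do not import; signature/manifest mismatch",
        DevPortalVerifyExitCode.TsaMissing => "obtain a timestamped bundle or rerun with --offline",
        _ => "unexpected failure; check CLI logs",
    };
}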

/// <summary>
/// Result of DevPortal bundle verification.
/// </summary>
public sealed class DevPortalBundleVerificationResult
{
    public string Status { get; set; } = "failed";
    public string? BundleId { get; set; }
    public string? RootHash { get; set; }
    public int Entries { get; set; }
    public DateTimeOffset? CreatedAt { get; set; }
    public bool Portable { get; set; }
    public DevPortalVerifyExitCode ExitCode { get; set; } = DevPortalVerifyExitCode.Unexpected;
    public string? ErrorMessage { get; set; }
    public string? ErrorDetail { get; set; }

    public static DevPortalBundleVerificationResult Failed(
        DevPortalVerifyExitCode exitCode,
        string message,
        string? detail = null)
        => new()
        {
            Status = "failed",
            ExitCode = exitCode,
            ErrorMessage = message,
            ErrorDetail = detail
        };

    public string ToJson()
    {
        var options = new JsonSerializerOptions
        {
            WriteIndented = false,
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
        };

        // Build output with sorted keys
        var output = new SortedDictionary<string, object?>(StringComparer.Ordinal);

        if (BundleId is not null)
            output["bundleId"] = BundleId;
        if (CreatedAt.HasValue)
            output["createdAt"] = CreatedAt.Value.ToString("O");
        output["entries"] = Entries;
        if (ErrorDetail is not null)
            output["errorDetail"] = ErrorDetail;
        if (ErrorMessage is not null)
            output["errorMessage"] = ErrorMessage;
        output["portable"] = Portable;
        if (RootHash is not null)
            output["rootHash"] = RootHash;
        output["status"] = Status;

        return JsonSerializer.Serialize(output, options);
    }
}
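
// For reference, ToJson() is deterministic (ordinal-sorted keys, nulls omitted), so a successful
// verification serializes along these lines (values are illustrative placeholders):
//
//   {"bundleId":"bundle-001","createdAt":"2025-01-01T00:00:00.0000000+00:00","entries":4,
//    "portable":true,"rootHash":"sha256:...","status":"verified"}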

29
src/Cli/StellaOps.Cli/Services/IAttestationBundleVerifier.cs
Normal file
@@ -0,0 +1,29 @@
using StellaOps.Cli.Services.Models;

namespace StellaOps.Cli.Services;

/// <summary>
/// Interface for attestation bundle verification.
/// </summary>
public interface IAttestationBundleVerifier
{
    /// <summary>
    /// Verifies an attestation bundle exported from the Export Center.
    /// </summary>
    /// <param name="options">Verification options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Verification result with status and exit code.</returns>
    Task<AttestationBundleVerifyResult> VerifyAsync(
        AttestationBundleVerifyOptions options,
        CancellationToken cancellationToken);

    /// <summary>
    /// Imports an attestation bundle into the local system.
    /// </summary>
    /// <param name="options">Import options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Import result with status and exit code.</returns>
    Task<AttestationBundleImportResult> ImportAsync(
        AttestationBundleImportOptions options,
        CancellationToken cancellationToken);
}

19
src/Cli/StellaOps.Cli/Services/IDevPortalBundleVerifier.cs
Normal file
@@ -0,0 +1,19 @@
namespace StellaOps.Cli.Services;

/// <summary>
/// Interface for DevPortal bundle verification.
/// </summary>
public interface IDevPortalBundleVerifier
{
    /// <summary>
    /// Verifies a DevPortal/EvidenceLocker sealed bundle.
    /// </summary>
    /// <param name="bundlePath">Path to the bundle .tgz file.</param>
    /// <param name="offline">If true, skip TSA verification and online checks.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Verification result with status and exit code.</returns>
    Task<DevPortalBundleVerificationResult> VerifyBundleAsync(
        string bundlePath,
        bool offline,
        CancellationToken cancellationToken);
}
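
// Editor's sketch (not part of this commit): these interfaces would typically be bound in the
// CLI's composition root, e.g.:
//
//   services.AddSingleton<IAttestationBundleVerifier, AttestationBundleVerifier>();
//   services.AddSingleton<IDevPortalBundleVerifier, DevPortalBundleVerifier>();
//
// Note that HandleDevPortalVerifyAsync currently constructs DevPortalBundleVerifier directly,
// so this registration only matters once callers resolve the interface.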

126
src/Cli/StellaOps.Cli/Services/Models/AttestationBundleModels.cs
Normal file
@@ -0,0 +1,126 @@
using System.Text.Json.Serialization;

namespace StellaOps.Cli.Services.Models;

/// <summary>
/// Options for attestation bundle verification.
/// </summary>
public sealed record AttestationBundleVerifyOptions(
    string FilePath,
    bool Offline = false,
    bool VerifyTransparency = true,
    string? TrustRootPath = null);

/// <summary>
/// Options for attestation bundle import.
/// </summary>
public sealed record AttestationBundleImportOptions(
    string FilePath,
    string? Tenant = null,
    string? Namespace = null,
    bool Offline = false,
    bool VerifyTransparency = true,
    string? TrustRootPath = null);

/// <summary>
/// Result of attestation bundle verification.
/// </summary>
public sealed record AttestationBundleVerifyResult(
    bool Success,
    string Status,
    string? ExportId,
    string? AttestationId,
    string? RootHash,
    IReadOnlyList<string>? Subjects,
    string? PredicateType,
    string? StatementVersion,
    string BundlePath,
    string? ErrorMessage = null,
    int ExitCode = 0);

/// <summary>
/// Result of attestation bundle import.
/// </summary>
public sealed record AttestationBundleImportResult(
    bool Success,
    string Status,
    string? AttestationId,
    string? TenantId,
    string? Namespace,
    string? RootHash,
    string? ErrorMessage = null,
    int ExitCode = 0);

/// <summary>
/// JSON output for attestation bundle verify command.
/// </summary>
public sealed record AttestationBundleVerifyJson(
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("exportId")] string? ExportId,
    [property: JsonPropertyName("attestationId")] string? AttestationId,
    [property: JsonPropertyName("rootHash")] string? RootHash,
    [property: JsonPropertyName("subjects")] IReadOnlyList<string>? Subjects,
    [property: JsonPropertyName("predicateType")] string? PredicateType,
    [property: JsonPropertyName("bundlePath")] string BundlePath);

/// <summary>
/// JSON output for attestation bundle import command.
/// </summary>
public sealed record AttestationBundleImportJson(
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("attestationId")] string? AttestationId,
    [property: JsonPropertyName("tenantId")] string? TenantId,
    [property: JsonPropertyName("namespace")] string? Namespace,
    [property: JsonPropertyName("rootHash")] string? RootHash);

/// <summary>
/// Exit codes for attestation bundle commands.
/// </summary>
public static class AttestationBundleExitCodes
{
    /// <summary>Success.</summary>
    public const int Success = 0;

    /// <summary>General failure.</summary>
    public const int GeneralFailure = 1;

    /// <summary>Checksum mismatch.</summary>
    public const int ChecksumMismatch = 2;

    /// <summary>DSSE signature verification failure.</summary>
    public const int SignatureFailure = 3;

    /// <summary>Missing required TSA/CT log entry.</summary>
    public const int MissingTransparency = 4;

    /// <summary>Archive or file format error.</summary>
    public const int FormatError = 5;

    /// <summary>File not found.</summary>
    public const int FileNotFound = 6;

    /// <summary>Import failed.</summary>
    public const int ImportFailed = 7;
}

/// <summary>
/// Metadata parsed from an attestation bundle.
/// </summary>
internal sealed record AttestationBundleMetadata(
    string? Version,
    string? ExportId,
    string? AttestationId,
    string? TenantId,
    DateTimeOffset? CreatedAtUtc,
    string? RootHash,
    string? SourceUri,
    string? StatementVersion,
    IReadOnlyList<AttestationBundleSubjectDigest>? SubjectDigests);

/// <summary>
/// Subject digest from attestation bundle metadata.
/// </summary>
internal sealed record AttestationBundleSubjectDigest(
    string? Name,
    string? Digest,
    string? Algorithm);
@@ -0,0 +1,406 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using Xunit;

namespace StellaOps.Cli.Tests;

public sealed class AttestationBundleVerifierTests : IDisposable
{
    private readonly string _tempDir;
    private readonly AttestationBundleVerifier _verifier;

    public AttestationBundleVerifierTests()
    {
        _tempDir = Path.Combine(Path.GetTempPath(), $"attest-bundle-test-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_tempDir);

        _verifier = new AttestationBundleVerifier(NullLogger<AttestationBundleVerifier>.Instance);
    }

    public void Dispose()
    {
        if (Directory.Exists(_tempDir))
        {
            Directory.Delete(_tempDir, recursive: true);
        }
    }

    [Fact]
    public async Task VerifyAsync_FileNotFound_ReturnsFileNotFoundCode()
    {
        var options = new AttestationBundleVerifyOptions(
            Path.Combine(_tempDir, "nonexistent.tgz"),
            Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.FileNotFound, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_ValidBundle_ReturnsSuccess()
    {
        var bundlePath = await CreateValidBundleAsync();

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
        Assert.Equal("verified", result.Status);
    }

    [Fact]
    public async Task VerifyAsync_ValidBundle_ReturnsMetadata()
    {
        var bundlePath = await CreateValidBundleAsync();

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.NotNull(result.ExportId);
        Assert.NotNull(result.AttestationId);
        Assert.NotNull(result.RootHash);
        Assert.StartsWith("sha256:", result.RootHash);
    }

    [Fact]
    public async Task VerifyAsync_CorruptedArchive_ReturnsFormatError()
    {
        var bundlePath = Path.Combine(_tempDir, "corrupted.tgz");
        await File.WriteAllBytesAsync(bundlePath, Encoding.UTF8.GetBytes("not a valid tgz"));

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.FormatError, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_ChecksumMismatch_ReturnsChecksumMismatchCode()
    {
        var bundlePath = await CreateBundleWithBadChecksumAsync();

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.ChecksumMismatch, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_ExternalChecksumMismatch_ReturnsChecksumMismatchCode()
    {
        var bundlePath = await CreateValidBundleAsync();
        var checksumPath = bundlePath + ".sha256";
        await File.WriteAllTextAsync(checksumPath, "0000000000000000000000000000000000000000000000000000000000000000 " + Path.GetFileName(bundlePath));

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.ChecksumMismatch, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_MissingTransparency_WhenNotOffline_ReturnsMissingTransparencyCode()
    {
        var bundlePath = await CreateBundleWithoutTransparencyAsync();

        var options = new AttestationBundleVerifyOptions(
            bundlePath,
            Offline: false,
            VerifyTransparency: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.MissingTransparency, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_MissingTransparency_WhenOffline_ReturnsSuccess()
    {
        var bundlePath = await CreateBundleWithoutTransparencyAsync();

        var options = new AttestationBundleVerifyOptions(
            bundlePath,
            Offline: true,
            VerifyTransparency: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
    }

    [Fact]
    public async Task VerifyAsync_MissingDssePayload_ReturnsSignatureFailure()
    {
        var bundlePath = await CreateBundleWithMissingDssePayloadAsync();

        var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);

        var result = await _verifier.VerifyAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal(AttestationBundleExitCodes.SignatureFailure, result.ExitCode);
    }

    [Fact]
    public async Task ImportAsync_ValidBundle_ReturnsSuccess()
    {
        var bundlePath = await CreateValidBundleAsync();

        var options = new AttestationBundleImportOptions(
            bundlePath,
            Tenant: "test-tenant",
            Namespace: "test-namespace",
            Offline: true);

        var result = await _verifier.ImportAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
        Assert.Equal("imported", result.Status);
    }

    [Fact]
    public async Task ImportAsync_InvalidBundle_ReturnsVerificationFailed()
    {
        var bundlePath = Path.Combine(_tempDir, "invalid.tgz");
        await File.WriteAllBytesAsync(bundlePath, Encoding.UTF8.GetBytes("not valid"));

        var options = new AttestationBundleImportOptions(
            bundlePath,
            Tenant: "test-tenant",
            Offline: true);

        var result = await _verifier.ImportAsync(options, CancellationToken.None);

        Assert.False(result.Success);
        Assert.Equal("verification_failed", result.Status);
    }

    [Fact]
    public async Task ImportAsync_InheritsTenantFromMetadata()
    {
        var bundlePath = await CreateValidBundleAsync();

        var options = new AttestationBundleImportOptions(
            bundlePath,
            Tenant: null, // Not specified
            Offline: true);

        var result = await _verifier.ImportAsync(options, CancellationToken.None);

        Assert.True(result.Success);
        Assert.NotNull(result.TenantId); // Should come from bundle metadata
    }

    private async Task<string> CreateValidBundleAsync()
    {
        var bundlePath = Path.Combine(_tempDir, $"valid-bundle-{Guid.NewGuid():N}.tgz");
        var exportId = Guid.NewGuid().ToString("D");
        var attestationId = Guid.NewGuid().ToString("D");
        var tenantId = Guid.NewGuid().ToString("D");

        // Create statement JSON
        var statement = new
        {
            _type = "https://in-toto.io/Statement/v1",
            predicateType = "https://stellaops.io/attestations/vuln-scan/v1",
            subject = new[]
            {
                new { name = "test-image:latest", digest = new Dictionary<string, string> { ["sha256"] = "abc123" } }
            },
            predicate = new { }
        };
        var statementJson = JsonSerializer.Serialize(statement);
        var statementBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson));

        // Create DSSE envelope
        var dsse = new
        {
            payloadType = "application/vnd.in-toto+json",
            payload = statementBase64,
            signatures = new[]
            {
                new { keyid = "key-001", sig = "fake-signature-for-test" }
            }
        };
        var dsseJson = JsonSerializer.Serialize(dsse);

        // Create metadata
        var metadata = new
        {
            version = "attestation-bundle/v1",
            exportId,
            attestationId,
            tenantId,
            createdAtUtc = DateTimeOffset.UtcNow.ToString("O"),
            rootHash = "abc123def456",
            statementVersion = "v1"
        };
        var metadataJson = JsonSerializer.Serialize(metadata);

        // Create transparency entries
        var transparencyNdjson = "{\"logIndex\":1,\"logId\":\"test\"}\n";

        // Calculate checksums
        var dsseHash = ComputeHash(dsseJson);
        var statementHash = ComputeHash(statementJson);
        var metadataHash = ComputeHash(metadataJson);
        var transparencyHash = ComputeHash(transparencyNdjson);

        var checksums = new StringBuilder();
        checksums.AppendLine("# Attestation bundle checksums (sha256)");
        checksums.AppendLine($"{dsseHash} attestation.dsse.json");
        checksums.AppendLine($"{metadataHash} metadata.json");
        checksums.AppendLine($"{statementHash} statement.json");
        checksums.AppendLine($"{transparencyHash} transparency.ndjson");
        var checksumsText = checksums.ToString();

        // Create archive
        await using var fileStream = File.Create(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
        await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);

        await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
        await WriteEntryAsync(tarWriter, "checksums.txt", checksumsText);
        await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
        await WriteEntryAsync(tarWriter, "statement.json", statementJson);
        await WriteEntryAsync(tarWriter, "transparency.ndjson", transparencyNdjson);

        return bundlePath;
    }

    private async Task<string> CreateBundleWithoutTransparencyAsync()
    {
        var bundlePath = Path.Combine(_tempDir, $"no-transparency-{Guid.NewGuid():N}.tgz");

        var statement = new
        {
            _type = "https://in-toto.io/Statement/v1",
            predicateType = "https://stellaops.io/attestations/vuln-scan/v1",
            subject = new[] { new { name = "test", digest = new Dictionary<string, string> { ["sha256"] = "abc" } } }
        };
        var statementJson = JsonSerializer.Serialize(statement);
        var statementBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson));

        var dsse = new
        {
            payloadType = "application/vnd.in-toto+json",
            payload = statementBase64,
            signatures = new[] { new { keyid = "key-001", sig = "fake-sig" } }
        };
        var dsseJson = JsonSerializer.Serialize(dsse);

        var metadata = new
        {
            version = "attestation-bundle/v1",
            exportId = Guid.NewGuid().ToString("D"),
            attestationId = Guid.NewGuid().ToString("D"),
            tenantId = Guid.NewGuid().ToString("D"),
            rootHash = "abc123"
        };
        var metadataJson = JsonSerializer.Serialize(metadata);

        var dsseHash = ComputeHash(dsseJson);
        var statementHash = ComputeHash(statementJson);
        var metadataHash = ComputeHash(metadataJson);

        var checksums = $"# Checksums\n{dsseHash} attestation.dsse.json\n{metadataHash} metadata.json\n{statementHash} statement.json\n";

        await using var fileStream = File.Create(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
        await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);

        await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
        await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
        await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
        await WriteEntryAsync(tarWriter, "statement.json", statementJson);
        // No transparency.ndjson

        return bundlePath;
    }

    private async Task<string> CreateBundleWithBadChecksumAsync()
    {
        var bundlePath = Path.Combine(_tempDir, $"bad-checksum-{Guid.NewGuid():N}.tgz");

        var dsseJson = "{\"payloadType\":\"test\",\"payload\":\"dGVzdA==\",\"signatures\":[{\"keyid\":\"k\",\"sig\":\"s\"}]}";
        var statementJson = "{\"_type\":\"test\"}";
        var metadataJson = "{\"version\":\"v1\"}";

        // Intentionally wrong checksum
        var checksums = "0000000000000000000000000000000000000000000000000000000000000000 attestation.dsse.json\n";

        await using var fileStream = File.Create(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
        await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);

        await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
        await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
        await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
        await WriteEntryAsync(tarWriter, "statement.json", statementJson);

        return bundlePath;
    }

    private async Task<string> CreateBundleWithMissingDssePayloadAsync()
    {
        var bundlePath = Path.Combine(_tempDir, $"no-dsse-payload-{Guid.NewGuid():N}.tgz");

        // DSSE without payload
        var dsseJson = "{\"payloadType\":\"test\",\"signatures\":[]}";
        var statementJson = "{\"_type\":\"test\"}";
        var metadataJson = "{\"version\":\"v1\"}";

        var dsseHash = ComputeHash(dsseJson);
        var statementHash = ComputeHash(statementJson);
        var metadataHash = ComputeHash(metadataJson);
        var checksums = $"{dsseHash} attestation.dsse.json\n{metadataHash} metadata.json\n{statementHash} statement.json\n";

        await using var fileStream = File.Create(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
        await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);

        await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
        await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
        await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
        await WriteEntryAsync(tarWriter, "statement.json", statementJson);

        return bundlePath;
    }

    private static async Task WriteEntryAsync(TarWriter writer, string name, string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        using var dataStream = new MemoryStream(bytes);
        var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
        {
            DataStream = dataStream
        };
        await writer.WriteEntryAsync(entry);
    }

    private static string ComputeHash(string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        var hash = System.Security.Cryptography.SHA256.HashData(bytes);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
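The external-checksum tests above rely on a "<bundle>.sha256" sidecar convention: a hex digest as the first whitespace-separated token, followed by the file name. A minimal sketch of that check follows, assuming the verifier compares the sidecar digest against the archive's own SHA-256; the helper is illustrative, not the shipped implementation.

using System.Security.Cryptography;

static bool SidecarChecksumMatches(string bundlePath)
{
    var sidecarPath = bundlePath + ".sha256";
    if (!File.Exists(sidecarPath))
    {
        return true; // nothing to verify without a sidecar
    }

    // First token is the hex digest; anything after it is the file name.
    var expected = File.ReadAllText(sidecarPath).Split(' ', '\t', '\n')[0].Trim();

    using var stream = File.OpenRead(bundlePath);
    var actual = Convert.ToHexString(SHA256.HashData(stream)).ToLowerInvariant();
    return string.Equals(expected, actual, StringComparison.OrdinalIgnoreCase);
}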
@@ -0,0 +1,316 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cli.Services;
using Xunit;

namespace StellaOps.Cli.Tests.Services;

public sealed class DevPortalBundleVerifierTests : IDisposable
{
    private readonly string _tempDir;
    private readonly DevPortalBundleVerifier _verifier;

    public DevPortalBundleVerifierTests()
    {
        _tempDir = Path.Combine(Path.GetTempPath(), $"devportal-test-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_tempDir);
        _verifier = new DevPortalBundleVerifier(NullLogger<DevPortalBundleVerifier>.Instance);
    }

    public void Dispose()
    {
        if (Directory.Exists(_tempDir))
        {
            Directory.Delete(_tempDir, recursive: true);
        }
    }

    [Fact]
    public async Task VerifyBundleAsync_ReturnsSuccess_ForValidBundle()
    {
        var bundlePath = CreateValidBundle();

        var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);

        Assert.Equal("verified", result.Status);
        Assert.Equal(DevPortalVerifyExitCode.Success, result.ExitCode);
        Assert.Equal("a1b2c3d4-e5f6-7890-abcd-ef1234567890", result.BundleId);
        Assert.NotNull(result.RootHash);
        Assert.True(result.RootHash!.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase));
        Assert.Equal(1, result.Entries);
    }

    [Fact]
    public async Task VerifyBundleAsync_ReturnsUnexpected_WhenBundleNotFound()
    {
        var nonExistentPath = Path.Combine(_tempDir, "nonexistent.tgz");

        var result = await _verifier.VerifyBundleAsync(nonExistentPath, offline: true, CancellationToken.None);

        Assert.Equal("failed", result.Status);
        Assert.Equal(DevPortalVerifyExitCode.Unexpected, result.ExitCode);
        Assert.Contains("not found", result.ErrorMessage, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public async Task VerifyBundleAsync_ReturnsChecksumMismatch_WhenSha256DoesNotMatch()
    {
        var bundlePath = CreateValidBundle();
        var sha256Path = bundlePath + ".sha256";

        // Write incorrect hash
        await File.WriteAllTextAsync(sha256Path, "0000000000000000000000000000000000000000000000000000000000000000 bundle.tgz");

        var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);

        Assert.Equal("failed", result.Status);
        Assert.Equal(DevPortalVerifyExitCode.ChecksumMismatch, result.ExitCode);
    }

    [Fact]
    public async Task VerifyBundleAsync_SucceedsWithoutSha256File()
    {
        var bundlePath = CreateValidBundle();

        // Remove .sha256 file if exists
        var sha256Path = bundlePath + ".sha256";
        if (File.Exists(sha256Path))
        {
            File.Delete(sha256Path);
        }

        var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);

        Assert.Equal("verified", result.Status);
        Assert.Equal(DevPortalVerifyExitCode.Success, result.ExitCode);
    }

    [Fact]
    public async Task VerifyBundleAsync_ReturnsTsaMissing_WhenOnlineAndNoTimestamp()
    {
        var bundlePath = CreateBundleWithoutTimestamp();

        var result = await _verifier.VerifyBundleAsync(bundlePath, offline: false, CancellationToken.None);

        Assert.Equal("failed", result.Status);
        Assert.Equal(DevPortalVerifyExitCode.TsaMissing, result.ExitCode);
    }

    [Fact]
    public async Task VerifyBundleAsync_DetectsPortableBundle()
    {
        var bundlePath = CreatePortableBundle();

        var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);

        Assert.Equal("verified", result.Status);
        Assert.True(result.Portable);
    }

    [Fact]
    public void ToJson_OutputsKeysSortedAlphabetically()
    {
        var result = new DevPortalBundleVerificationResult
        {
            Status = "verified",
            BundleId = "test-id",
            RootHash = "sha256:abc123",
            Entries = 3,
            CreatedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
            Portable = false,
            ExitCode = DevPortalVerifyExitCode.Success
        };

        var json = result.ToJson();

        // Keys should be in alphabetical order
        var keys = JsonDocument.Parse(json).RootElement.EnumerateObject()
            .Select(p => p.Name)
            .ToList();

        var sortedKeys = keys.OrderBy(k => k, StringComparer.Ordinal).ToList();
        Assert.Equal(sortedKeys, keys);
    }

    private string CreateValidBundle()
    {
        var bundlePath = Path.Combine(_tempDir, $"bundle-{Guid.NewGuid():N}.tgz");

        var manifest = new
        {
            bundleId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
            tenantId = "00000000-0000-0000-0000-000000000001",
            kind = 2,
            createdAt = "2025-12-07T10:30:00Z",
            metadata = new Dictionary<string, string> { ["source"] = "test" },
            entries = new[]
            {
                new
                {
                    section = "sbom",
                    canonicalPath = "sbom/cyclonedx.json",
                    sha256 = new string('a', 64),
                    sizeBytes = 1024,
                    mediaType = "application/vnd.cyclonedx+json"
                }
            }
        };

        var manifestJson = JsonSerializer.Serialize(manifest, new JsonSerializerOptions { WriteIndented = false });
        var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));

        var signature = new
        {
            payloadType = "application/vnd.stella.evidence.manifest+json",
            payload = manifestPayload,
            signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
            keyId = "key-1",
            algorithm = "ES256",
            provider = "StellaOps",
            signedAt = "2025-12-07T10:30:05Z",
            timestampedAt = "2025-12-07T10:30:06Z",
            timestampAuthority = "https://freetsa.org/tsr",
            timestampToken = Convert.ToBase64String(Encoding.UTF8.GetBytes("tsa-token"))
        };

        var bundleMetadata = new
        {
            bundleId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
            tenantId = "00000000-0000-0000-0000-000000000001",
            kind = 2,
            status = 3,
            rootHash = new string('f', 64),
            storageKey = "evidence/bundle.tgz",
            createdAt = "2025-12-07T10:30:00Z",
            sealedAt = "2025-12-07T10:30:05Z"
        };

        CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);

        return bundlePath;
    }

    private string CreateBundleWithoutTimestamp()
    {
        var bundlePath = Path.Combine(_tempDir, $"bundle-no-tsa-{Guid.NewGuid():N}.tgz");

        var manifest = new
        {
            bundleId = "b2c3d4e5-f6a7-8901-bcde-f23456789012",
            tenantId = "00000000-0000-0000-0000-000000000001",
            kind = 2,
            createdAt = "2025-12-07T10:30:00Z",
            entries = Array.Empty<object>()
        };

        var manifestJson = JsonSerializer.Serialize(manifest);
        var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));

        var signature = new
        {
            payloadType = "application/vnd.stella.evidence.manifest+json",
            payload = manifestPayload,
            signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
            keyId = "key-1",
            algorithm = "ES256",
            provider = "StellaOps",
            signedAt = "2025-12-07T10:30:05Z"
            // No timestampedAt, timestampAuthority, timestampToken
        };

        var bundleMetadata = new
        {
            bundleId = "b2c3d4e5-f6a7-8901-bcde-f23456789012",
            tenantId = "00000000-0000-0000-0000-000000000001",
            kind = 2,
            status = 3,
            rootHash = new string('e', 64),
            storageKey = "evidence/bundle.tgz",
            createdAt = "2025-12-07T10:30:00Z",
            sealedAt = "2025-12-07T10:30:05Z"
        };

        CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);

        return bundlePath;
    }

    private string CreatePortableBundle()
    {
        var bundlePath = Path.Combine(_tempDir, $"portable-{Guid.NewGuid():N}.tgz");

        var manifest = new
        {
            bundleId = "c3d4e5f6-a7b8-9012-cdef-345678901234",
            kind = 1,
            createdAt = "2025-12-07T10:30:00Z",
            entries = Array.Empty<object>()
        };

        var manifestJson = JsonSerializer.Serialize(manifest);
        var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));

        var signature = new
        {
            payloadType = "application/vnd.stella.evidence.manifest+json",
            payload = manifestPayload,
            signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
            keyId = "key-1",
            algorithm = "ES256",
            provider = "StellaOps",
            signedAt = "2025-12-07T10:30:05Z",
            timestampedAt = "2025-12-07T10:30:06Z",
            timestampAuthority = "tsa.default",
            timestampToken = Convert.ToBase64String(Encoding.UTF8.GetBytes("tsa-token"))
        };

        var bundleMetadata = new
        {
            bundleId = "c3d4e5f6-a7b8-9012-cdef-345678901234",
            kind = 1,
            status = 3,
            rootHash = new string('d', 64),
            createdAt = "2025-12-07T10:30:00Z",
            sealedAt = "2025-12-07T10:30:05Z",
            portableGeneratedAt = "2025-12-07T10:35:00Z" // Indicates portable bundle
        };

        CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);

        return bundlePath;
    }

    private static void CreateTgzBundle(string bundlePath, string manifestJson, object signature, object bundleMetadata)
    {
        using var memoryStream = new MemoryStream();
        using (var gzipStream = new GZipStream(memoryStream, CompressionLevel.Optimal, leaveOpen: true))
        using (var tarWriter = new TarWriter(gzipStream))
        {
            AddTarEntry(tarWriter, "manifest.json", manifestJson);
            AddTarEntry(tarWriter, "signature.json", JsonSerializer.Serialize(signature));
            AddTarEntry(tarWriter, "bundle.json", JsonSerializer.Serialize(bundleMetadata));
            AddTarEntry(tarWriter, "checksums.txt", $"# checksums\n{new string('f', 64)} sbom/cyclonedx.json\n");
        }

        memoryStream.Position = 0;
        using var fileStream = File.Create(bundlePath);
        memoryStream.CopyTo(fileStream);
    }

    private static void AddTarEntry(TarWriter writer, string name, string content)
    {
        var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
        {
            Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
            ModificationTime = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero)
        };

        var bytes = Encoding.UTF8.GetBytes(content);
        entry.DataStream = new MemoryStream(bytes);
        writer.WriteEntry(entry);
    }
}
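Note the pinned Mode and ModificationTime in AddTarEntry above: fixing the tar metadata keeps each generated .tgz byte-stable across test runs, so digests computed over the archive stay reproducible when the checksum-oriented assertions in these fixtures compare them.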
32
src/Concelier/Directory.Build.props
Normal file
32
src/Concelier/Directory.Build.props
Normal file
@@ -0,0 +1,32 @@
<Project>
  <PropertyGroup>
    <!-- Keep Concelier test harness active while trimming Mongo dependencies. Allow opt-out per project. -->
    <UseConcelierTestInfra Condition="'$(UseConcelierTestInfra)'==''">true</UseConcelierTestInfra>
  </PropertyGroup>
  <ItemGroup>
    <!-- Concelier is migrating off MongoDB; strip implicit Mongo2Go/Mongo driver packages inherited from the repo root. -->
    <PackageReference Remove="Mongo2Go" />
    <PackageReference Remove="MongoDB.Driver" />
  </ItemGroup>
  <ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)'=='true'">
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
    <ProjectReference Include="$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj"
                      Condition="Exists('$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj')" />
    <Using Include="StellaOps.Concelier.Testing"
           Condition="Exists('$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj')" />
    <Using Include="Xunit" />
  </ItemGroup>
  <!-- Keep OpenSSL shim sources available to Mongo2Go-free test harnesses if needed. -->
  <ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)'=='true'">
    <None Include="$(MSBuildThisFileDirectory)..\..\tests\native\openssl-1.1\linux-x64\*.so.1.1"
          Link="native/linux-x64/%(Filename)%(Extension)"
          CopyToOutputDirectory="PreserveNewest" />
    <Compile Include="$(MSBuildThisFileDirectory)..\..\tests\shared\OpenSslLegacyShim.cs" Link="Shared/OpenSslLegacyShim.cs" />
    <Compile Include="$(MSBuildThisFileDirectory)..\..\tests\shared\OpenSslAutoInit.cs" Link="Shared/OpenSslAutoInit.cs" />
  </ItemGroup>
</Project>
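A usage sketch of the per-project opt-out the first comment mentions; the project file below is hypothetical. Because Directory.Build.props is imported before the project body, a property set in the project overrides the default before the conditioned ItemGroups are evaluated.

<!-- Hypothetical test project opting out of the shared Concelier test infra -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>
</Project>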
@@ -30,7 +30,7 @@ public sealed class RawDocumentStorage
        string uri,
        byte[] content,
        string? contentType,
        DateTimeOffset? expiresAt,
        DateTimeOffset? ExpiresAt,
        CancellationToken cancellationToken,
        Guid? documentId = null)
    {

@@ -418,7 +418,7 @@ public sealed class UbuntuConnector : IFeedConnector
        await _stateRepository.UpdateCursorAsync(SourceName, doc, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
    }

    private static string ComputeNoticeHash(BsonDocument document)
    private string ComputeNoticeHash(BsonDocument document)
    {
        var bytes = document.ToBson();
        var hash = _hash.ComputeHash(bytes, HashAlgorithms.Sha256);
@@ -0,0 +1,38 @@
using System;
using System.Collections.Generic;
using System.Linq;

namespace StellaOps.Concelier.Core.Linksets
{
    public static class PolicyAuthSignalFactory
    {
        public static PolicyAuthSignal ToPolicyAuthSignal(AdvisoryLinkset linkset)
        {
            if (linkset is null) throw new ArgumentNullException(nameof(linkset));

            var subject = linkset.Normalized?.Purls?.FirstOrDefault() ?? linkset.AdvisoryId;
            var evidenceUri = $"urn:linkset:{linkset.AdvisoryId}";

            return new PolicyAuthSignal(
                Id: linkset.AdvisoryId,
                Tenant: linkset.TenantId,
                Subject: subject ?? string.Empty,
                Source: linkset.Source,
                SignalType: "reachability",
                Evidence: new[]
                {
                    new PolicyAuthEvidence(evidenceUri)
                });
        }
    }

    public sealed record PolicyAuthSignal(
        string Id,
        string Tenant,
        string Subject,
        string Source,
        string SignalType,
        IReadOnlyList<PolicyAuthEvidence> Evidence);

    public sealed record PolicyAuthEvidence(string Uri);
}
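A brief usage sketch of the factory above; the linkset accessor is hypothetical, since AdvisoryLinkset is defined elsewhere in Concelier and its construction is elided here.

// Hypothetical caller: the linkset is assumed to come from the Concelier linkset store.
AdvisoryLinkset linkset = await linksetStore.GetAsync(advisoryId, cancellationToken); // illustrative accessor
PolicyAuthSignal signal = PolicyAuthSignalFactory.ToPolicyAuthSignal(linkset);
// Subject falls back to the advisory id when no normalized purl is present;
// Evidence carries a single "urn:linkset:<advisoryId>" URI.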
@@ -1,248 +1,276 @@
using System;
using System.Collections;
using System.Text;
using System.Globalization;
using System.Text.Json;

namespace MongoDB.Bson
{
    public readonly struct ObjectId : IEquatable<ObjectId>
    public class BsonValue : IEquatable<BsonValue?>
    {
        public Guid Value { get; }
        public ObjectId(Guid value) => Value = value;
        public ObjectId(string value) => Value = Guid.TryParse(value, out var g) ? g : Guid.Empty;
        public static ObjectId GenerateNewId() => new(Guid.NewGuid());
        public static ObjectId Empty => new(Guid.Empty);
        public bool Equals(ObjectId other) => Value.Equals(other.Value);
        public override bool Equals(object? obj) => obj is ObjectId other && Equals(other);
        public override int GetHashCode() => Value.GetHashCode();
        public override string ToString() => Value.ToString("N");
        public static bool operator ==(ObjectId left, ObjectId right) => left.Equals(right);
        public static bool operator !=(ObjectId left, ObjectId right) => !left.Equals(right);
    }
        protected object? RawValue;

    public enum BsonType { Document, Array, String, Boolean, Int32, Int64, Double, DateTime, Guid, Null }

    public class BsonValue
    {
        protected readonly object? _value;
        public BsonValue(object? value) => _value = value;
        internal object? RawValue => _value;
        public static implicit operator BsonValue(string value) => new BsonString(value ?? string.Empty);
        public static implicit operator BsonValue(bool value) => new BsonBoolean(value);
        public static implicit operator BsonValue(int value) => new BsonInt32(value);
        public static implicit operator BsonValue(long value) => new BsonInt64(value);
        public static implicit operator BsonValue(double value) => new BsonDouble(value);
        public static implicit operator BsonValue(DateTime value) => new BsonDateTime(DateTime.SpecifyKind(value, DateTimeKind.Utc));
        public static implicit operator BsonValue(DateTimeOffset value) => new BsonDateTime(value.UtcDateTime);
        public static implicit operator BsonValue(Guid value) => new BsonString(value.ToString("D"));
        public static BsonValue Create(object? value) => BsonDocument.WrapExternal(value);
        public virtual BsonType BsonType => _value switch
        public BsonValue(object? value = null)
        {
            null => BsonType.Null,
            BsonDocument => BsonType.Document,
            BsonArray => BsonType.Array,
            string => BsonType.String,
            bool => BsonType.Boolean,
            int => BsonType.Int32,
            long => BsonType.Int64,
            double => BsonType.Double,
            DateTime => BsonType.DateTime,
            DateTimeOffset => BsonType.DateTime,
            Guid => BsonType.Guid,
            _ => BsonType.Null
        };
        public bool IsString => _value is string;
        public bool IsBsonDocument => _value is BsonDocument;
        public bool IsBsonArray => _value is BsonArray;
        public bool IsBsonNull => _value is null;
        public string AsString => _value?.ToString() ?? string.Empty;
        public BsonDocument AsBsonDocument => _value as BsonDocument ?? throw new InvalidCastException();
        public BsonArray AsBsonArray => _value as BsonArray ?? throw new InvalidCastException();
        public Guid AsGuid => _value is Guid g ? g : Guid.Empty;
        public DateTime AsDateTime => _value switch
        {
            DateTimeOffset dto => dto.UtcDateTime,
            DateTime dt => dt,
            _ => DateTime.MinValue
        };
        public int AsInt32 => _value is int i ? i : 0;
        public long AsInt64 => _value is long l ? l : 0;
        public double AsDouble => _value is double d ? d : 0d;
        public bool AsBoolean => _value is bool b && b;
        public bool IsInt32 => _value is int;
        public DateTime ToUniversalTime() => _value switch
        {
            DateTimeOffset dto => dto.UtcDateTime,
            DateTime dt => dt.Kind == DateTimeKind.Utc ? dt : dt.ToUniversalTime(),
            string s when DateTimeOffset.TryParse(s, out var parsed) => parsed.UtcDateTime,
            _ => DateTime.MinValue
        };
        public override string ToString() => _value?.ToString() ?? string.Empty;
    }

    public class BsonString : BsonValue { public BsonString(string value) : base(value) { } }
    public class BsonBoolean : BsonValue { public BsonBoolean(bool value) : base(value) { } }
    public class BsonInt32 : BsonValue { public BsonInt32(int value) : base(value) { } }
    public class BsonInt64 : BsonValue { public BsonInt64(long value) : base(value) { } }
    public class BsonDouble : BsonValue { public BsonDouble(double value) : base(value) { } }
    public class BsonDateTime : BsonValue { public BsonDateTime(DateTime value) : base(value) { } }
    public class BsonNull : BsonValue
    {
        private BsonNull() : base(null) { }
        public static BsonNull Value { get; } = new();
    }

    public sealed class BsonElement
    {
        public BsonElement(string name, BsonValue value)
        {
            Name = name;
            Value = value;
            RawValue = value;
        }

        public string Name { get; }
        public BsonValue Value { get; }
    }
        public bool IsString => RawValue is string;
        public bool IsBoolean => RawValue is bool;
        public bool IsBsonDocument => RawValue is BsonDocument;
        public bool IsBsonArray => RawValue is BsonArray;

    public class BsonBinaryData : BsonValue
    {
        private readonly byte[] _bytes;
        public BsonBinaryData(byte[] bytes) : base(null) => _bytes = bytes ?? Array.Empty<byte>();
        public BsonBinaryData(Guid guid) : this(guid.ToByteArray()) { }
        public byte[] AsByteArray => _bytes;
        public Guid ToGuid() => new(_bytes);
    }

    public class BsonArray : BsonValue, IEnumerable<BsonValue>
    {
        private readonly List<BsonValue> _items = new();
        public BsonArray() : base(null) { }
        public BsonArray(IEnumerable<BsonValue> values) : this() => _items.AddRange(values);
        public BsonArray(IEnumerable<object?> values) : this()
        public string AsString => RawValue switch
        {
            foreach (var value in values)
            {
                _items.Add(BsonDocument.WrapExternal(value));
            }
        }
        public void Add(BsonValue value) => _items.Add(value);
        public IEnumerator<BsonValue> GetEnumerator() => _items.GetEnumerator();
        IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
        public BsonValue this[int index] { get => _items[index]; set => _items[index] = value; }
        public int Count => _items.Count;
            null => string.Empty,
            string s => s,
            Guid g => g.ToString(),
            _ => Convert.ToString(RawValue, CultureInfo.InvariantCulture) ?? string.Empty
        };

        public bool AsBoolean => RawValue switch
        {
            bool b => b,
            string s when bool.TryParse(s, out var b) => b,
            int i => i != 0,
            long l => l != 0,
            _ => false
        };

        public int ToInt32() => RawValue switch
        {
            int i => i,
            long l => (int)l,
            double d => (int)d,
            string s when int.TryParse(s, NumberStyles.Any, CultureInfo.InvariantCulture, out var i) => i,
            _ => 0
        };

        public Guid AsGuid => RawValue switch
        {
            Guid g => g,
            string s when Guid.TryParse(s, out var g) => g,
            _ => Guid.Empty
        };

        public ObjectId AsObjectId => RawValue switch
        {
            ObjectId o => o,
            string s => ObjectId.Parse(s),
            _ => ObjectId.Empty
        };

        public BsonDocument AsBsonDocument => RawValue as BsonDocument ?? (this as BsonDocument ?? new BsonDocument());
        public BsonArray AsBsonArray => RawValue as BsonArray ?? (this as BsonArray ?? new BsonArray());

        public override string ToString() => AsString;

        internal virtual BsonValue Clone() => new BsonValue(RawValue);

        public bool Equals(BsonValue? other) => other is not null && Equals(RawValue, other.RawValue);
        public override bool Equals(object? obj) => obj is BsonValue other && Equals(other);
        public override int GetHashCode() => RawValue?.GetHashCode() ?? 0;

        public static implicit operator BsonValue(string value) => new(value);
        public static implicit operator BsonValue(Guid value) => new(value);
        public static implicit operator BsonValue(int value) => new(value);
        public static implicit operator BsonValue(long value) => new(value);
        public static implicit operator BsonValue(bool value) => new(value);
        public static implicit operator BsonValue(double value) => new(value);
        public static implicit operator BsonValue(DateTimeOffset value) => new(value);
    }

    public class BsonDocument : BsonValue, IEnumerable<KeyValuePair<string, BsonValue>>
    public sealed class BsonDocument : BsonValue, IDictionary<string, BsonValue>
    {
        private readonly Dictionary<string, BsonValue> _values = new(StringComparer.Ordinal);
        public BsonDocument() : base(null) { }
        public BsonDocument(string key, object? value) : this() => _values[key] = Wrap(value);
        public BsonDocument(IEnumerable<KeyValuePair<string, object?>> pairs) : this()

        public BsonDocument()
            : base(null)
        {
            foreach (var kvp in pairs)
            RawValue = this;
        }

        public BsonDocument(IDictionary<string, object?> values)
            : this()
        {
            foreach (var kvp in values)
            {
                _values[kvp.Key] = Wrap(kvp.Value);
                _values[kvp.Key] = ToBsonValue(kvp.Value);
            }
        }

        private static BsonValue Wrap(object? value) => value switch
        {
            BsonValue v => v,
            IEnumerable<BsonValue> enumerable => new BsonArray(enumerable),
            IEnumerable<object?> objEnum => new BsonArray(objEnum.Select(Wrap)),
            _ => new BsonValue(value)
        };

        internal static BsonValue WrapExternal(object? value) => Wrap(value);
        public int ElementCount => _values.Count;

        public BsonValue this[string key]
        {
            get => _values[key];
            set => _values[key] = Wrap(value);
            set => _values[key] = value ?? new BsonValue();
        }

        public int ElementCount => _values.Count;
        public IEnumerable<BsonElement> Elements => _values.Select(kvp => new BsonElement(kvp.Key, kvp.Value));
        public ICollection<string> Keys => _values.Keys;
        public ICollection<BsonValue> Values => _values.Values;
        public int Count => _values.Count;
        public bool IsReadOnly => false;

        public bool Contains(string key) => _values.ContainsKey(key);
        public void Add(string key, BsonValue value) => _values[key] = value ?? new BsonValue();
        public void Add(string key, object? value) => _values[key] = ToBsonValue(value);
        public void Add(KeyValuePair<string, BsonValue> item) => Add(item.Key, item.Value);

        public void Clear() => _values.Clear();
        public bool Contains(KeyValuePair<string, BsonValue> item) => _values.Contains(item);
        public bool ContainsKey(string key) => _values.ContainsKey(key);
        public void CopyTo(KeyValuePair<string, BsonValue>[] array, int arrayIndex) => ((IDictionary<string, BsonValue>)_values).CopyTo(array, arrayIndex);
        public IEnumerator<KeyValuePair<string, BsonValue>> GetEnumerator() => _values.GetEnumerator();
        IEnumerator IEnumerable.GetEnumerator() => _values.GetEnumerator();
        public bool Remove(string key) => _values.Remove(key);
        public bool Remove(KeyValuePair<string, BsonValue> item) => _values.Remove(item.Key);
        public bool TryGetValue(string key, out BsonValue value) => _values.TryGetValue(key, out value!);

        public BsonValue GetValue(string key, BsonValue? defaultValue = null)
        {
            return _values.TryGetValue(key, out var value)
                ? value
                : defaultValue ?? new BsonValue(null);
        }

        public bool Remove(string key) => _values.Remove(key);

        public void Add(string key, BsonValue value) => _values[key] = value;
        public void Add(string key, object? value) => _values[key] = Wrap(value);

        public IEnumerator<KeyValuePair<string, BsonValue>> GetEnumerator() => _values.GetEnumerator();
        IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
        public BsonValue GetValue(string key) => _values[key];

        public BsonDocument DeepClone()
        {
            var clone = new BsonDocument();
            var copy = new BsonDocument();
            foreach (var kvp in _values)
            {
                clone[kvp.Key] = kvp.Value;
                copy._values[kvp.Key] = kvp.Value?.Clone() ?? new BsonValue();
            }
            return clone;
            return copy;
        }

        public static BsonDocument Parse(string json)
        {
            using var doc = JsonDocument.Parse(json);
            return FromElement(doc.RootElement);
            return FromElement(doc.RootElement).AsBsonDocument;
        }

        private static BsonDocument FromElement(JsonElement element)
        private static BsonValue FromElement(JsonElement element)
        {
            return element.ValueKind switch
            {
                JsonValueKind.Object => FromObject(element),
                JsonValueKind.Array => FromArray(element),
                JsonValueKind.String => new BsonValue(element.GetString()),
                JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonValue(l) : new BsonValue(element.GetDouble()),
                JsonValueKind.True => new BsonValue(true),
                JsonValueKind.False => new BsonValue(false),
                JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null),
                _ => new BsonValue(element.ToString())
            };
        }

        private static BsonDocument FromObject(JsonElement element)
        {
            var doc = new BsonDocument();
            foreach (var prop in element.EnumerateObject())
            foreach (var property in element.EnumerateObject())
            {
                doc[prop.Name] = FromJsonValue(prop.Value);
                doc[property.Name] = FromElement(property.Value);
            }
            return doc;
        }

        private static BsonValue FromJsonValue(JsonElement element) => element.ValueKind switch
        private static BsonArray FromArray(JsonElement element)
        {
            JsonValueKind.Object => FromElement(element),
            JsonValueKind.Array => new BsonArray(element.EnumerateArray().Select(FromJsonValue)),
            JsonValueKind.String => new BsonString(element.GetString() ?? string.Empty),
            JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonInt64(l) : new BsonDouble(element.GetDouble()),
            JsonValueKind.True => new BsonBoolean(true),
            JsonValueKind.False => new BsonBoolean(false),
            JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null),
            _ => new BsonValue(null)
        };

        public string ToJson(MongoDB.Bson.IO.JsonWriterSettings? settings = null)
        {
            var dict = _values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value));
            return JsonSerializer.Serialize(dict, new JsonSerializerOptions(JsonSerializerDefaults.Web));
            var array = new BsonArray();
            foreach (var item in element.EnumerateArray())
            {
                array.Add(FromElement(item));
            }
            return array;
        }

        public byte[] ToBson() => Encoding.UTF8.GetBytes(ToJson());

        private static object? Unwrap(BsonValue value) => value switch
        internal static BsonValue ToBsonValue(object? value)
        {
            BsonDocument doc => doc._values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value)),
            BsonArray array => array.Select(Unwrap).ToArray(),
            _ => value.RawValue
        };
            return value switch
            {
                null => new BsonValue(null),
                BsonValue bson => bson,
                string s => new BsonValue(s),
                Guid g => new BsonValue(g),
                int i => new BsonValue(i),
                long l => new BsonValue(l),
                bool b => new BsonValue(b),
                double d => new BsonValue(d),
                float f => new BsonValue(f),
                DateTime dt => new BsonValue(dt),
                DateTimeOffset dto => new BsonValue(dto),
                IEnumerable<object?> enumerable => new BsonArray(enumerable.Select(ToBsonValue)),
                _ => new BsonValue(value)
            };
        }

        internal override BsonValue Clone() => DeepClone();
    }

    public sealed class BsonArray : BsonValue, IList<BsonValue>
    {
        private readonly List<BsonValue> _items = new();

        public BsonArray()
            : base(null)
        {
            RawValue = this;
        }

        public BsonArray(IEnumerable<BsonValue> items)
            : this()
        {
            _items.AddRange(items);
        }

        public BsonValue this[int index]
        {
            get => _items[index];
            set => _items[index] = value ?? new BsonValue();
        }

        public int Count => _items.Count;
        public bool IsReadOnly => false;

        public void Add(BsonValue item) => _items.Add(item ?? new BsonValue());
        public void Add(object? item) => _items.Add(BsonDocument.ToBsonValue(item));
        public void Clear() => _items.Clear();
        public bool Contains(BsonValue item) => _items.Contains(item);
        public void CopyTo(BsonValue[] array, int arrayIndex) => _items.CopyTo(array, arrayIndex);
        public IEnumerator<BsonValue> GetEnumerator() => _items.GetEnumerator();
        IEnumerator IEnumerable.GetEnumerator() => _items.GetEnumerator();
        public int IndexOf(BsonValue item) => _items.IndexOf(item);
        public void Insert(int index, BsonValue item) => _items.Insert(index, item ?? new BsonValue());
        public bool Remove(BsonValue item) => _items.Remove(item);
        public void RemoveAt(int index) => _items.RemoveAt(index);

        internal override BsonValue Clone() => new BsonArray(_items.Select(i => i.Clone()));
    }

    public readonly struct ObjectId : IEquatable<ObjectId>
    {
        private readonly string _value;

        public ObjectId(string value)
        {
            _value = value;
        }

        public static ObjectId Empty { get; } = new(string.Empty);

        public override string ToString() => _value;

        public static ObjectId Parse(string value) => new(value ?? string.Empty);

        public bool Equals(ObjectId other) => string.Equals(_value, other._value, StringComparison.Ordinal);
        public override bool Equals(object? obj) => obj is ObjectId other && Equals(other);
        public override int GetHashCode() => _value?.GetHashCode(StringComparison.Ordinal) ?? 0;
    }
}

namespace MongoDB.Bson.IO
namespace MongoDB.Bson.Serialization.Attributes
{
    public enum JsonOutputMode { Strict, RelaxedExtendedJson }
    public class JsonWriterSettings
    [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Class | AttributeTargets.Struct)]
    public sealed class BsonElementAttribute : Attribute
    {
        public JsonOutputMode OutputMode { get; set; } = JsonOutputMode.Strict;
        public BsonElementAttribute(string elementName)
        {
            ElementName = elementName;
        }

        public string ElementName { get; }
    }
}
@@ -4,6 +4,7 @@ using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;

namespace MongoDB.Driver
{
@@ -31,6 +32,7 @@ namespace MongoDB.Driver
    public interface IMongoClient
    {
        IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? settings = null);
        Task DropDatabaseAsync(string name, CancellationToken cancellationToken = default);
    }

    public class MongoClient : IMongoClient
@@ -38,20 +40,47 @@ namespace MongoDB.Driver
        public MongoClient(string connectionString) { }
        public MongoClient(MongoClientSettings settings) { }
        public IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? settings = null) => new MongoDatabase(name);
        public Task DropDatabaseAsync(string name, CancellationToken cancellationToken = default) => Task.CompletedTask;
    }

    public class MongoDatabaseSettings { }

    public sealed class DatabaseNamespace
    {
        public DatabaseNamespace(string databaseName) => DatabaseName = databaseName;
        public string DatabaseName { get; }
    }

    public interface IMongoDatabase
    {
        IMongoCollection<TDocument> GetCollection<TDocument>(string name, MongoCollectionSettings? settings = null);
        DatabaseNamespace DatabaseNamespace { get; }
        Task DropCollectionAsync(string name, CancellationToken cancellationToken = default);
        BsonDocument RunCommand(BsonDocument command, CancellationToken cancellationToken = default);
        T RunCommand<T>(BsonDocument command, CancellationToken cancellationToken = default);
        Task<T> RunCommandAsync<T>(BsonDocument command, CancellationToken cancellationToken = default);
        BsonDocument RunCommand(string command, CancellationToken cancellationToken = default);
        T RunCommand<T>(string command, CancellationToken cancellationToken = default);
        Task<T> RunCommandAsync<T>(string command, CancellationToken cancellationToken = default);
    }

    public class MongoDatabase : IMongoDatabase
    {
        public MongoDatabase(string name) => Name = name;
        public MongoDatabase(string name)
        {
            Name = name;
            DatabaseNamespace = new DatabaseNamespace(name);
        }
        public string Name { get; }
        public DatabaseNamespace DatabaseNamespace { get; }
        public IMongoCollection<TDocument> GetCollection<TDocument>(string name, MongoCollectionSettings? settings = null) => new MongoCollection<TDocument>(name);
        public Task DropCollectionAsync(string name, CancellationToken cancellationToken = default) => Task.CompletedTask;
        public BsonDocument RunCommand(BsonDocument command, CancellationToken cancellationToken = default) => new();
        public T RunCommand<T>(BsonDocument command, CancellationToken cancellationToken = default) => default!;
        public Task<T> RunCommandAsync<T>(BsonDocument command, CancellationToken cancellationToken = default) => Task.FromResult(default(T)!);
        public BsonDocument RunCommand(string command, CancellationToken cancellationToken = default) => new();
        public T RunCommand<T>(string command, CancellationToken cancellationToken = default) => default!;
        public Task<T> RunCommandAsync<T>(string command, CancellationToken cancellationToken = default) => Task.FromResult(default(T)!);
    }

    public class MongoCollectionSettings { }
@@ -59,8 +88,10 @@ namespace MongoDB.Driver
    public interface IMongoCollection<TDocument>
    {
        Task InsertOneAsync(TDocument document, InsertOneOptions? options = null, CancellationToken cancellationToken = default);
        Task InsertManyAsync(IEnumerable<TDocument> documents, InsertManyOptions? options = null, CancellationToken cancellationToken = default);
        Task<ReplaceOneResult> ReplaceOneAsync(FilterDefinition<TDocument> filter, TDocument replacement, ReplaceOptions? options = null, CancellationToken cancellationToken = default);
        Task<DeleteResult> DeleteOneAsync(FilterDefinition<TDocument> filter, CancellationToken cancellationToken = default);
        Task<DeleteResult> DeleteManyAsync(FilterDefinition<TDocument> filter, CancellationToken cancellationToken = default);
        Task<IAsyncCursor<TDocument>> FindAsync(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null, CancellationToken cancellationToken = default);
        IFindFluent<TDocument, TDocument> Find(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null);
        Task<long> CountDocumentsAsync(FilterDefinition<TDocument> filter, CountOptions? options = null, CancellationToken cancellationToken = default);
@@ -88,6 +119,12 @@ namespace MongoDB.Driver
            return Task.CompletedTask;
        }

        public Task InsertManyAsync(IEnumerable<TDocument> documents, InsertManyOptions? options = null, CancellationToken cancellationToken = default)
        {
            _docs.AddRange(documents);
            return Task.CompletedTask;
        }

        public Task<ReplaceOneResult> ReplaceOneAsync(FilterDefinition<TDocument> filter, TDocument replacement, ReplaceOptions? options = null, CancellationToken cancellationToken = default)
        {
            _docs.Clear();
@@ -102,6 +139,13 @@ namespace MongoDB.Driver
            return Task.FromResult(new DeleteResult(removed ? 1 : 0));
        }

        public Task<DeleteResult> DeleteManyAsync(FilterDefinition<TDocument> filter, CancellationToken cancellationToken = default)
        {
            var removed = _docs.Count;
            _docs.Clear();
            return Task.FromResult(new DeleteResult(removed));
        }

        public Task<IAsyncCursor<TDocument>> FindAsync(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null, CancellationToken cancellationToken = default)
            => Task.FromResult<IAsyncCursor<TDocument>>(new AsyncCursor<TDocument>(_docs));

@@ -212,7 +256,10 @@ namespace MongoDB.Driver
            => new FindFluentProjected<TDocument, TNewProjection>(Enumerable.Empty<TNewProjection>());
    }

    public class FilterDefinition<TDocument> { }
    public class FilterDefinition<TDocument>
    {
        public static FilterDefinition<TDocument> Empty { get; } = new();
    }
    public class UpdateDefinition<TDocument> { }
    public class ProjectionDefinition<TDocument, TProjection> { }
    public class SortDefinition<TDocument> { }
@@ -222,6 +269,7 @@ namespace MongoDB.Driver
    public class FindOneAndReplaceOptions<TDocument, TProjection> { public bool IsUpsert { get; set; } }
    public class FindOneAndUpdateOptions<TDocument, TProjection> { public bool IsUpsert { get; set; } }
    public class InsertOneOptions { }
    public class InsertManyOptions { }
    public class CreateIndexOptions { }
    public class IndexKeysDefinition<TDocument> { }

@@ -284,7 +332,7 @@ namespace Mongo2Go

    private MongoDbRunner(string connectionString) => ConnectionString = connectionString;

    public static MongoDbRunner Start() => new("mongodb://localhost:27017/fake");
    public static MongoDbRunner Start(bool singleNodeReplSet = false) => new("mongodb://localhost:27017/fake");

    public void Dispose()
    {
@@ -1,19 +1,27 @@
using System.Collections.Concurrent;
using System.IO;
using System.Linq;
using MongoDB.Bson;
using StellaOps.Concelier.Models;

namespace StellaOps.Concelier.Storage.Mongo
{
    public static class MongoStorageDefaults
    {
        public const string DefaultDatabaseName = "concelier";

        public static class Collections
        {
            public const string AdvisoryStatements = "advisory_statements";
            public const string AdvisoryRaw = "advisory_raw";
            public const string Advisory = "advisory";
            public const string AdvisoryObservations = "advisory_observations";
            public const string AdvisoryLinksets = "advisory_linksets";
            public const string Alias = "aliases";
            public const string Dto = "dto";
            public const string MergeEvent = "merge_events";
            public const string Document = "documents";
            public const string PsirtFlags = "psirt_flags";
        }
    }

@@ -64,13 +72,32 @@ namespace StellaOps.Concelier.Storage.Mongo
        this.FetchedAt = FetchedAt ?? CreatedAt;
    }

    public DocumentRecord(
        Guid Id,
        string SourceName,
        string Uri,
        string Sha256,
        string Status = "pending_parse",
        string? ContentType = null,
        IReadOnlyDictionary<string, string>? Headers = null,
        IReadOnlyDictionary<string, string>? Metadata = null,
        string? Etag = null,
        DateTimeOffset? LastModified = null,
        Guid? PayloadId = null,
        DateTimeOffset? ExpiresAt = null,
        byte[]? Payload = null,
        DateTimeOffset? FetchedAt = null)
        : this(Id, SourceName, Uri, DateTimeOffset.UtcNow, Sha256, Status, ContentType, Headers, Metadata, Etag, LastModified, PayloadId, ExpiresAt, Payload, FetchedAt)
    {
    }

    public Guid Id { get; init; }
    public string SourceName { get; init; }
    public string Uri { get; init; }
    public string SourceName { get; init; } = string.Empty;
    public string Uri { get; init; } = string.Empty;
    public DateTimeOffset CreatedAt { get; init; }
    public DateTimeOffset FetchedAt { get; init; }
    public string Sha256 { get; init; }
    public string Status { get; init; }
    public string Sha256 { get; init; } = string.Empty;
    public string Status { get; init; } = string.Empty;
    public string? ContentType { get; init; }
    public IReadOnlyDictionary<string, string>? Headers { get; init; }
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
@@ -81,37 +108,37 @@ namespace StellaOps.Concelier.Storage.Mongo
    public byte[]? Payload { get; init; }
}

public interface IDocumentStore
{
    Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken);
    Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken);
    Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken);
    Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken);
}
public interface IDocumentStore
{
    Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken);
    Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken);
    Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken);
    Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken);
}

public class InMemoryDocumentStore : IDocumentStore
{
    private readonly ConcurrentDictionary<(string Source, string Uri), DocumentRecord> _records = new();
    private readonly ConcurrentDictionary<Guid, DocumentRecord> _byId = new();

    public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
    {
        _records.TryGetValue((sourceName, uri), out var record);
        return Task.FromResult<DocumentRecord?>(record);
    }
    public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
    {
        _records.TryGetValue((sourceName, uri), out var record);
        return Task.FromResult<DocumentRecord?>(record);
    }

    public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken)
    {
        _byId.TryGetValue(id, out var record);
        return Task.FromResult<DocumentRecord?>(record);
    }
    public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken)
    {
        _byId.TryGetValue(id, out var record);
        return Task.FromResult<DocumentRecord?>(record);
    }

    public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken)
    {
        _records[(record.SourceName, record.Uri)] = record;
        _byId[record.Id] = record;
        return Task.FromResult(record);
    }
    public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken)
    {
        _records[(record.SourceName, record.Uri)] = record;
        _byId[record.Id] = record;
        return Task.FromResult(record);
    }

    public Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken)
    {
@@ -129,6 +156,22 @@ public interface IDocumentStore
{
    private readonly InMemoryDocumentStore _inner = new();

    public DocumentStore()
    {
    }

    public DocumentStore(object? database, MongoStorageOptions? options)
    {
    }

    public DocumentStore(object? database, object? logger)
    {
    }

    public DocumentStore(object? database, MongoStorageOptions? options, object? logger)
    {
    }

    public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
        => _inner.FindBySourceAndUriAsync(sourceName, uri, cancellationToken);

@@ -142,47 +185,70 @@ public interface IDocumentStore
        => _inner.UpdateStatusAsync(id, status, cancellationToken);
}

public record DtoRecord(
    Guid Id,
    Guid DocumentId,
    string SourceName,
    string Format,
    MongoDB.Bson.BsonDocument Payload,
    DateTimeOffset CreatedAt)
public record DtoRecord
{
    public DtoRecord(
        Guid Id,
        Guid DocumentId,
        string SourceName,
        string Format,
        MongoDB.Bson.BsonDocument Payload,
        DateTimeOffset CreatedAt,
        string? SchemaVersion = null,
        DateTimeOffset? ValidatedAt = null)
    {
        this.Id = Id;
        this.DocumentId = DocumentId;
        this.SourceName = SourceName;
        this.Format = Format;
        this.Payload = Payload;
        this.CreatedAt = CreatedAt;
        this.SchemaVersion = SchemaVersion ?? string.Empty;
        this.ValidatedAt = ValidatedAt ?? CreatedAt;
    }

    public Guid Id { get; init; }
    public Guid DocumentId { get; init; }
    public string SourceName { get; init; } = string.Empty;
    public string Format { get; init; } = string.Empty;
    public MongoDB.Bson.BsonDocument Payload { get; init; } = new();
    public DateTimeOffset CreatedAt { get; init; }
    public string SchemaVersion { get; init; } = string.Empty;
    public DateTimeOffset ValidatedAt { get; init; } = CreatedAt;
    public DateTimeOffset ValidatedAt { get; init; }
}

public interface IDtoStore
{
    Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken);
    Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken);
    Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, CancellationToken cancellationToken);
}

public class InMemoryDtoStore : IDtoStore
{
    private readonly ConcurrentDictionary<Guid, DtoRecord> _records = new();

    public Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken)
public interface IDtoStore
{
        _records[record.DocumentId] = record;
        return Task.FromResult(record);
    Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken);
    Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken);
    Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken);
}

    public Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken)
public class InMemoryDtoStore : IDtoStore
{
        _records.TryGetValue(documentId, out var record);
        return Task.FromResult<DtoRecord?>(record);
    }
    private readonly ConcurrentDictionary<Guid, DtoRecord> _records = new();

    public Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, CancellationToken cancellationToken)
    {
        var matches = _records.Values.Where(r => string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase)).ToArray();
        return Task.FromResult<IReadOnlyList<DtoRecord>>(matches);
    public Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken)
    {
        _records[record.DocumentId] = record;
        return Task.FromResult(record);
    }

    public Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken)
    {
        _records.TryGetValue(documentId, out var record);
        return Task.FromResult<DtoRecord?>(record);
    }

    public Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken)
    {
        var matches = _records.Values
            .Where(r => string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase))
            .Take(limit)
            .ToArray();
        return Task.FromResult<IReadOnlyList<DtoRecord>>(matches);
    }
}
}

internal sealed class RawDocumentStorage
{
@@ -251,7 +317,7 @@ public sealed record SourceStateRecord(
        sourceName,
        Enabled: current?.Enabled ?? true,
        Paused: current?.Paused ?? false,
        Cursor: cursor.DeepClone(),
        Cursor: cursor.DeepClone().AsBsonDocument,
        LastSuccess: completedAt,
        LastFailure: current?.LastFailure,
        FailCount: current?.FailCount ?? 0,
@@ -288,6 +354,18 @@ public sealed record SourceStateRecord(
{
    private readonly InMemorySourceStateRepository _inner = new();

    public MongoSourceStateRepository()
    {
    }

    public MongoSourceStateRepository(object? database, MongoStorageOptions? options)
    {
    }

    public MongoSourceStateRepository(object? database, object? logger)
    {
    }

    public Task<SourceStateRecord?> TryGetAsync(string sourceName, CancellationToken cancellationToken)
        => _inner.TryGetAsync(sourceName, cancellationToken);

@@ -304,6 +382,15 @@ public sealed record SourceStateRecord(

namespace StellaOps.Concelier.Storage.Mongo.Advisories
{
    public sealed class AdvisoryDocument
    {
        public string AdvisoryKey { get; set; } = string.Empty;
        public MongoDB.Bson.BsonDocument Payload { get; set; } = new();
        public DateTime? Modified { get; set; }
        public DateTime? Published { get; set; }
        public DateTime? CreatedAt { get; set; }
    }

    public interface IAdvisoryStore
    {
        Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken);
@@ -360,18 +447,49 @@ namespace StellaOps.Concelier.Storage.Mongo.Aliases
    public sealed record AliasEntry(string Scheme, string Value);
    public sealed record AliasRecord(string AdvisoryKey, string Scheme, string Value, DateTimeOffset? UpdatedAt = null);
    public sealed record AliasCollision(string Scheme, string Value, IReadOnlyList<string> AdvisoryKeys);
    public sealed record AliasUpsertResult(string AdvisoryKey, IReadOnlyList<AliasCollision> Collisions);

    public interface IAliasStore
    {
        Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken);
        Task<IReadOnlyList<AliasRecord>> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken);
        Task<AliasUpsertResult> ReplaceAsync(string advisoryKey, IEnumerable<AliasEntry> entries, DateTimeOffset updatedAt, CancellationToken cancellationToken);
    }

    public sealed class InMemoryAliasStore : IAliasStore
    public sealed class AliasStore : InMemoryAliasStore
    {
        public AliasStore()
        {
        }

        public AliasStore(object? database, object? options)
        {
        }
    }

    public class InMemoryAliasStore : IAliasStore
    {
        private readonly ConcurrentDictionary<string, List<AliasRecord>> _byAdvisory = new(StringComparer.OrdinalIgnoreCase);
        private readonly ConcurrentDictionary<(string Scheme, string Value), List<AliasRecord>> _byAlias = new();

        public Task<AliasUpsertResult> ReplaceAsync(string advisoryKey, IEnumerable<AliasEntry> entries, DateTimeOffset updatedAt, CancellationToken cancellationToken)
        {
            var records = entries.Select(e => new AliasRecord(advisoryKey, e.Scheme, e.Value, updatedAt)).ToList();
            _byAdvisory[advisoryKey] = records;
            foreach (var record in records)
            {
                var list = _byAlias.GetOrAdd((record.Scheme, record.Value), _ => new List<AliasRecord>());
                list.RemoveAll(r => string.Equals(r.AdvisoryKey, advisoryKey, StringComparison.OrdinalIgnoreCase));
                list.Add(record);
            }
            var collisions = _byAlias.Values
                .Where(list => list.Count > 1)
                .Select(list => new AliasCollision(list[0].Scheme, list[0].Value, list.Select(r => r.AdvisoryKey).Distinct(StringComparer.OrdinalIgnoreCase).ToArray()))
                .ToArray();

            return Task.FromResult(new AliasUpsertResult(advisoryKey, collisions));
        }

        public Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken)
        {
            _byAdvisory.TryGetValue(advisoryKey, out var records);
@@ -400,11 +518,16 @@ namespace StellaOps.Concelier.Storage.Mongo.ChangeHistory
        string Snapshot,
        string PreviousSnapshot,
        IReadOnlyList<ChangeHistoryFieldChange> Changes,
        DateTimeOffset CreatedAt);
        DateTimeOffset CreatedAt)
    {
        public string? PreviousHash => PreviousSnapshotHash;
        public string? CurrentHash => SnapshotHash;
    }

    public interface IChangeHistoryStore
    {
        Task AddAsync(ChangeHistoryRecord record, CancellationToken cancellationToken);
        Task<IReadOnlyList<ChangeHistoryRecord>> GetRecentAsync(string sourceName, string advisoryKey, int limit, CancellationToken cancellationToken);
    }

    public sealed class InMemoryChangeHistoryStore : IChangeHistoryStore
@@ -415,6 +538,18 @@ namespace StellaOps.Concelier.Storage.Mongo.ChangeHistory
            _records.Add(record);
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<ChangeHistoryRecord>> GetRecentAsync(string sourceName, string advisoryKey, int limit, CancellationToken cancellationToken)
        {
            var matches = _records
                .Where(r =>
                    string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase) &&
                    string.Equals(r.AdvisoryKey, advisoryKey, StringComparison.OrdinalIgnoreCase))
                .OrderByDescending(r => r.CreatedAt)
                .Take(limit)
                .ToArray();
            return Task.FromResult<IReadOnlyList<ChangeHistoryRecord>>(matches);
        }
    }
}

@@ -597,6 +732,25 @@ namespace StellaOps.Concelier.Storage.Mongo.MergeEvents
            return Task.FromResult<IReadOnlyList<MergeEventRecord>>(records);
        }
    }

    public sealed class MergeEventStore : IMergeEventStore
    {
        private readonly InMemoryMergeEventStore _inner = new();

        public MergeEventStore()
        {
        }

        public MergeEventStore(object? database, object? logger)
        {
        }

        public Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken)
            => _inner.AppendAsync(record, cancellationToken);

        public Task<IReadOnlyList<MergeEventRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken)
            => _inner.GetRecentAsync(advisoryKey, limit, cancellationToken);
    }
}

namespace StellaOps.Concelier.Storage.Mongo.Documents
@@ -617,12 +771,16 @@ namespace StellaOps.Concelier.Storage.Mongo.Dtos

namespace StellaOps.Concelier.Storage.Mongo.PsirtFlags
{
    public sealed record PsirtFlagRecord(string AdvisoryId, string Vendor, string SourceName, string? ExternalId, DateTimeOffset RecordedAt);
    public sealed record PsirtFlagRecord(string AdvisoryId, string Vendor, string SourceName, string? ExternalId, DateTimeOffset RecordedAt)
    {
        public string AdvisoryKey => AdvisoryId;
    }

    public interface IPsirtFlagStore
    {
        Task UpsertAsync(PsirtFlagRecord flag, CancellationToken cancellationToken);
        Task<IReadOnlyList<PsirtFlagRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken);
        Task<PsirtFlagRecord?> FindAsync(string advisoryKey, CancellationToken cancellationToken);
    }

    public sealed class InMemoryPsirtFlagStore : IPsirtFlagStore
@@ -645,6 +803,94 @@ namespace StellaOps.Concelier.Storage.Mongo.PsirtFlags

            return Task.FromResult<IReadOnlyList<PsirtFlagRecord>>(records);
        }

        public Task<PsirtFlagRecord?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
        {
            _records.TryGetValue(advisoryKey, out var flag);
            return Task.FromResult<PsirtFlagRecord?>(flag);
        }
    }
}

namespace StellaOps.Concelier.Storage.Mongo.Observations
{
    public sealed class AdvisoryObservationDocument
    {
        public string Id { get; set; } = string.Empty;
        public string Tenant { get; set; } = string.Empty;
        public DateTime CreatedAt { get; set; }
        public AdvisoryObservationSourceDocument Source { get; set; } = new();
        public AdvisoryObservationUpstreamDocument Upstream { get; set; } = new();
        public AdvisoryObservationContentDocument Content { get; set; } = new();
        public AdvisoryObservationLinksetDocument Linkset { get; set; } = new();
        public IDictionary<string, string> Attributes { get; set; } = new Dictionary<string, string>(StringComparer.Ordinal);
    }

    public sealed class AdvisoryObservationSourceDocument
    {
        public string Vendor { get; set; } = string.Empty;
        public string Stream { get; set; } = string.Empty;
        public string Api { get; set; } = string.Empty;
    }

    public sealed class AdvisoryObservationUpstreamDocument
    {
        public string UpstreamId { get; set; } = string.Empty;
        public string? DocumentVersion { get; set; }
        public DateTime FetchedAt { get; set; }
        public DateTime ReceivedAt { get; set; }
        public string ContentHash { get; set; } = string.Empty;
        public AdvisoryObservationSignatureDocument Signature { get; set; } = new();
        public IDictionary<string, string> Metadata { get; set; } = new Dictionary<string, string>(StringComparer.Ordinal);
    }

    public sealed class AdvisoryObservationSignatureDocument
    {
        public bool Present { get; set; }
        public string? Format { get; set; }
        public string? KeyId { get; set; }
        public string? Signature { get; set; }
    }

    public sealed class AdvisoryObservationContentDocument
    {
        public string Format { get; set; } = string.Empty;
        public string SpecVersion { get; set; } = string.Empty;
        public BsonDocument Raw { get; set; } = new();
        public IDictionary<string, string> Metadata { get; set; } = new Dictionary<string, string>(StringComparer.Ordinal);
    }

    public sealed class AdvisoryObservationLinksetDocument
    {
        public List<string>? Aliases { get; set; }
        public List<string>? Purls { get; set; }
        public List<string>? Cpes { get; set; }
        public List<AdvisoryObservationReferenceDocument> References { get; set; } = new();
    }

    public sealed class AdvisoryObservationReferenceDocument
    {
        public string Type { get; set; } = string.Empty;
        public string Url { get; set; } = string.Empty;
    }
}

namespace StellaOps.Concelier.Storage.Mongo.Linksets
{
    public sealed class AdvisoryLinksetDocument
    {
        public string TenantId { get; set; } = string.Empty;
        public string Source { get; set; } = string.Empty;
        public string AdvisoryId { get; set; } = string.Empty;
        public IReadOnlyList<string> Observations { get; set; } = Array.Empty<string>();
        public DateTime CreatedAt { get; set; }
        public AdvisoryLinksetNormalizedDocument Normalized { get; set; } = new();
    }

    public sealed class AdvisoryLinksetNormalizedDocument
    {
        public IReadOnlyList<string> Purls { get; set; } = Array.Empty<string>();
        public IReadOnlyList<string> Versions { get; set; } = Array.Empty<string>();
    }
}

@@ -88,8 +88,10 @@ public sealed class CertCcMapperTests
            Id: Guid.NewGuid(),
            DocumentId: document.Id,
            SourceName: "cert-cc",
            Format: "certcc.vince.note.v1",
            SchemaVersion: "certcc.vince.note.v1",
            Payload: new BsonDocument(),
            CreatedAt: PublishedAt,
            ValidatedAt: PublishedAt.AddMinutes(1));

        var advisory = CertCcMapper.Map(dto, document, dtoRecord, "cert-cc");

@@ -190,11 +190,11 @@ public sealed class SourceStateSeedProcessorTests : IAsyncLifetime
        Assert.NotNull(refreshedRecord);
        Assert.Equal(documentId, refreshedRecord!.Id);
        Assert.NotNull(refreshedRecord.PayloadId);
        Assert.NotEqual(previousGridId, refreshedRecord.PayloadId);
        Assert.NotEqual(previousGridId?.ToString(), refreshedRecord.PayloadId?.ToString());

        var files = await filesCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
        Assert.Single(files);
        Assert.NotEqual(previousGridId, files[0]["_id"].AsObjectId);
        Assert.NotEqual(previousGridId?.ToString(), files[0]["_id"].AsObjectId.ToString());
    }

    private SourceStateSeedProcessor CreateProcessor()

@@ -34,7 +34,7 @@ public sealed class SuseMapperTests
            },
            Etag: "adv-1",
            LastModified: DateTimeOffset.UtcNow,
            PayloadId: ObjectId.Empty);
            PayloadId: Guid.Empty);

        var mapped = SuseMapper.Map(dto, document, DateTimeOffset.UtcNow);

@@ -97,8 +97,10 @@ public sealed class OsvConflictFixtureTests
            Id: Guid.Parse("6f7d5ce7-cb47-40a5-8b41-8ad022b5fd5c"),
            DocumentId: document.Id,
            SourceName: OsvConnectorPlugin.SourceName,
            Format: "osv.v1",
            SchemaVersion: "osv.v1",
            Payload: new BsonDocument("id", dto.Id),
            CreatedAt: new DateTimeOffset(2025, 3, 6, 12, 0, 0, TimeSpan.Zero),
            ValidatedAt: new DateTimeOffset(2025, 3, 6, 12, 5, 0, TimeSpan.Zero));

        var advisory = OsvMapper.Map(dto, document, dtoRecord, "npm");

@@ -65,7 +65,7 @@ public sealed class RuBduMapperTests
            null,
            null,
            dto.IdentifyDate,
            ObjectId.GenerateNewId());
            PayloadId: Guid.NewGuid());

        var advisory = RuBduMapper.Map(dto, document, dto.IdentifyDate!.Value);

@@ -56,7 +56,7 @@ public sealed class RuNkckiMapperTests
            null,
            null,
            dto.DateUpdated,
            ObjectId.GenerateNewId());
            PayloadId: Guid.NewGuid());

        Assert.Equal("КРИТИЧЕСКИЙ", dto.CvssRating);
        var normalizeSeverity = typeof(RuNkckiMapper).GetMethod("NormalizeSeverity", BindingFlags.NonPublic | BindingFlags.Static)!;

@@ -1,12 +1,11 @@
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using System.Collections.Immutable;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Exporter.Json;
@@ -15,15 +14,16 @@ using StellaOps.Concelier.Storage.Mongo.Exporting;
using StellaOps.Concelier.Models;
using StellaOps.Cryptography;
using StellaOps.Cryptography.DependencyInjection;

namespace StellaOps.Concelier.Exporter.Json.Tests;

public sealed class JsonExporterDependencyInjectionRoutineTests
{
    [Fact]
    public void Register_AddsJobDefinitionAndServices()
    {
        var services = new ServiceCollection();
using StellaOps.Provenance.Mongo;

namespace StellaOps.Concelier.Exporter.Json.Tests;

public sealed class JsonExporterDependencyInjectionRoutineTests
{
    [Fact]
    public void Register_AddsJobDefinitionAndServices()
    {
        var services = new ServiceCollection();
        services.AddLogging();
        services.AddSingleton<IAdvisoryStore, StubAdvisoryStore>();
        services.AddSingleton<IExportStateStore, StubExportStateStore>();
@@ -32,64 +32,60 @@ public sealed class JsonExporterDependencyInjectionRoutineTests
        services.AddOptions<JobSchedulerOptions>();
        services.Configure<CryptoHashOptions>(_ => { });
        services.AddStellaOpsCrypto();

        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string?>())
            .Build();

        var routine = new JsonExporterDependencyInjectionRoutine();
        routine.Register(services, configuration);

        using var provider = services.BuildServiceProvider();
        var optionsAccessor = provider.GetRequiredService<IOptions<JobSchedulerOptions>>();
        var options = optionsAccessor.Value;

        Assert.True(options.Definitions.TryGetValue(JsonExportJob.JobKind, out var definition));
        Assert.Equal(typeof(JsonExportJob), definition.JobType);
        Assert.True(definition.Enabled);

        var exporter = provider.GetRequiredService<JsonFeedExporter>();
        Assert.NotNull(exporter);
    }

    private sealed class StubAdvisoryStore : IAdvisoryStore
    {
        public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null)

        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string?>())
            .Build();

        var routine = new JsonExporterDependencyInjectionRoutine();
        routine.Register(services, configuration);

        using var provider = services.BuildServiceProvider();
        var optionsAccessor = provider.GetRequiredService<IOptions<JobSchedulerOptions>>();
        var options = optionsAccessor.Value;

        Assert.True(options.Definitions.TryGetValue(JsonExportJob.JobKind, out var definition));
        Assert.Equal(typeof(JsonExportJob), definition.JobType);
        Assert.True(definition.Enabled);

        var exporter = provider.GetRequiredService<JsonFeedExporter>();
        Assert.NotNull(exporter);
    }

    private sealed class StubAdvisoryStore : IAdvisoryStore
    {
        public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken)
        {
            _ = session;
            return Task.FromResult<IReadOnlyList<Advisory>>(Array.Empty<Advisory>());
        }

        public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null)
        public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
        {
            _ = session;
            return Task.FromResult<Advisory?>(null);
        }

        public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null)
        public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken)
        {
            _ = session;
            return Task.CompletedTask;
        }

        public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null)
        public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken)
        {
            _ = session;
            return Enumerate(cancellationToken);

            static async IAsyncEnumerable<Advisory> Enumerate([EnumeratorCancellation] CancellationToken ct)
            {
                ct.ThrowIfCancellationRequested();
                await Task.Yield();
                yield break;
            }
        }
    }

                ct.ThrowIfCancellationRequested();
                await Task.Yield();
                yield break;
            }
        }
    }

    private sealed class StubExportStateStore : IExportStateStore
    {
        private ExportStateRecord? _record;

        public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
        {
            return Task.FromResult(_record);
@@ -107,6 +103,9 @@ public sealed class JsonExporterDependencyInjectionRoutineTests
        public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
            => throw new NotSupportedException();

        public ValueTask AttachStatementProvenanceAsync(Guid statementId, DsseProvenance provenance, TrustInfo trust, CancellationToken cancellationToken)
            => ValueTask.CompletedTask;

        public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
        {
            return ValueTask.FromResult(new AdvisoryReplay(

(File diff suppressed because it is too large.)
@@ -11,7 +11,6 @@ using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Exporter.Json;
using StellaOps.Concelier.Exporter.TrivyDb;
using StellaOps.Concelier.Models;
@@ -883,27 +882,23 @@ public sealed class TrivyDbFeedExporterTests : IDisposable
        _advisories = advisories;
    }

    public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null)
    public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken)
    {
        _ = session;
        return Task.FromResult(_advisories);
    }

    public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null)
    public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
    {
        _ = session;
        return Task.FromResult<Advisory?>(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey));
    }

    public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null)
    public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken)
    {
        _ = session;
        return Task.CompletedTask;
    }

    public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null)
    public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken)
    {
        _ = session;
        return EnumerateAsync(cancellationToken);

        async IAsyncEnumerable<Advisory> EnumerateAsync([EnumeratorCancellation] CancellationToken ct)

@@ -2,109 +2,109 @@ using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Driver;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;

namespace StellaOps.Concelier.Merge.Tests;

public sealed class AdvisoryMergeServiceTests
{
    [Fact]
    public async Task MergeAsync_AppliesCanonicalRulesAndPersistsDecisions()
    {
        var aliasStore = new FakeAliasStore();
        aliasStore.Register("GHSA-aaaa-bbbb-cccc",
            (AliasSchemes.Ghsa, "GHSA-aaaa-bbbb-cccc"),
            (AliasSchemes.Cve, "CVE-2025-4242"));
        aliasStore.Register("CVE-2025-4242",
            (AliasSchemes.Cve, "CVE-2025-4242"));
        aliasStore.Register("OSV-2025-xyz",
            (AliasSchemes.OsV, "OSV-2025-xyz"),
            (AliasSchemes.Cve, "CVE-2025-4242"));

        var advisoryStore = new FakeAdvisoryStore();
        advisoryStore.Seed(CreateGhsaAdvisory(), CreateNvdAdvisory(), CreateOsvAdvisory());

        var mergeEventStore = new InMemoryMergeEventStore();
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 4, 1, 0, 0, 0, TimeSpan.Zero));
        var writer = new MergeEventWriter(mergeEventStore, new CanonicalHashCalculator(), timeProvider, NullLogger<MergeEventWriter>.Instance);
        var precedenceMerger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider);
        var aliasResolver = new AliasGraphResolver(aliasStore);
        var canonicalMerger = new CanonicalMerger(timeProvider);
        var eventLog = new RecordingAdvisoryEventLog();
        var service = new AdvisoryMergeService(aliasResolver, advisoryStore, precedenceMerger, writer, canonicalMerger, eventLog, timeProvider, NullLogger<AdvisoryMergeService>.Instance);

        var result = await service.MergeAsync("GHSA-aaaa-bbbb-cccc", CancellationToken.None);

using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
using StellaOps.Provenance.Mongo;

namespace StellaOps.Concelier.Merge.Tests;

public sealed class AdvisoryMergeServiceTests
{
    [Fact]
    public async Task MergeAsync_AppliesCanonicalRulesAndPersistsDecisions()
    {
        var aliasStore = new FakeAliasStore();
        aliasStore.Register("GHSA-aaaa-bbbb-cccc",
            (AliasSchemes.Ghsa, "GHSA-aaaa-bbbb-cccc"),
            (AliasSchemes.Cve, "CVE-2025-4242"));
        aliasStore.Register("CVE-2025-4242",
            (AliasSchemes.Cve, "CVE-2025-4242"));
        aliasStore.Register("OSV-2025-xyz",
            (AliasSchemes.OsV, "OSV-2025-xyz"),
            (AliasSchemes.Cve, "CVE-2025-4242"));

        var advisoryStore = new FakeAdvisoryStore();
        advisoryStore.Seed(CreateGhsaAdvisory(), CreateNvdAdvisory(), CreateOsvAdvisory());

        var mergeEventStore = new InMemoryMergeEventStore();
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 4, 1, 0, 0, 0, TimeSpan.Zero));
        var writer = new MergeEventWriter(mergeEventStore, new CanonicalHashCalculator(), timeProvider, NullLogger<MergeEventWriter>.Instance);
        var precedenceMerger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider);
        var aliasResolver = new AliasGraphResolver(aliasStore);
        var canonicalMerger = new CanonicalMerger(timeProvider);
        var eventLog = new RecordingAdvisoryEventLog();
        var service = new AdvisoryMergeService(aliasResolver, advisoryStore, precedenceMerger, writer, canonicalMerger, eventLog, timeProvider, NullLogger<AdvisoryMergeService>.Instance);

        var result = await service.MergeAsync("GHSA-aaaa-bbbb-cccc", CancellationToken.None);

        Assert.NotNull(result.Merged);
        Assert.Equal("OSV summary overrides", result.Merged!.Summary);
        Assert.Empty(result.Conflicts);

        var upserted = advisoryStore.LastUpserted;
        Assert.NotNull(upserted);
        Assert.Equal("CVE-2025-4242", upserted!.AdvisoryKey);
        Assert.Equal("OSV summary overrides", upserted.Summary);

        var mergeRecord = mergeEventStore.LastRecord;
        Assert.NotNull(mergeRecord);
        var summaryDecision = Assert.Single(mergeRecord!.FieldDecisions, decision => decision.Field == "summary");
        Assert.Equal("osv", summaryDecision.SelectedSource);
        Assert.Equal("freshness_override", summaryDecision.DecisionReason);

        var appendRequest = eventLog.LastRequest;
        Assert.NotNull(appendRequest);
        Assert.Contains(appendRequest!.Statements, statement => string.Equals(statement.Advisory.AdvisoryKey, "CVE-2025-4242", StringComparison.OrdinalIgnoreCase));
        Assert.True(appendRequest.Conflicts is null || appendRequest.Conflicts.Count == 0);
    }

    private static Advisory CreateGhsaAdvisory()
    {
        var recorded = DateTimeOffset.Parse("2025-03-01T00:00:00Z");
        var provenance = new AdvisoryProvenance("ghsa", "map", "GHSA-aaaa-bbbb-cccc", recorded, new[] { ProvenanceFieldMasks.Advisory });
        return new Advisory(
            "GHSA-aaaa-bbbb-cccc",
            "Container escape",
            "Initial GHSA summary.",
            "en",
            recorded,
            recorded,
            "medium",
            exploitKnown: false,
            aliases: new[] { "CVE-2025-4242", "GHSA-aaaa-bbbb-cccc" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });
    }

    private static Advisory CreateNvdAdvisory()
    {
        var recorded = DateTimeOffset.Parse("2025-03-02T00:00:00Z");
        var provenance = new AdvisoryProvenance("nvd", "map", "CVE-2025-4242", recorded, new[] { ProvenanceFieldMasks.Advisory });
        return new Advisory(
            "CVE-2025-4242",
            "CVE-2025-4242",
            "Baseline NVD summary.",
            "en",
            recorded,
            recorded,
            "high",
            exploitKnown: false,
            aliases: new[] { "CVE-2025-4242" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });
    }

        var upserted = advisoryStore.LastUpserted;
        Assert.NotNull(upserted);
        Assert.Equal("CVE-2025-4242", upserted!.AdvisoryKey);
        Assert.Equal("OSV summary overrides", upserted.Summary);

        var mergeRecord = mergeEventStore.LastRecord;
        Assert.NotNull(mergeRecord);
        var summaryDecision = Assert.Single(mergeRecord!.FieldDecisions, decision => decision.Field == "summary");
        Assert.Equal("osv", summaryDecision.SelectedSource);
        Assert.Equal("freshness_override", summaryDecision.DecisionReason);

        var appendRequest = eventLog.LastRequest;
        Assert.NotNull(appendRequest);
        Assert.Contains(appendRequest!.Statements, statement => string.Equals(statement.Advisory.AdvisoryKey, "CVE-2025-4242", StringComparison.OrdinalIgnoreCase));
        Assert.True(appendRequest.Conflicts is null || appendRequest.Conflicts.Count == 0);
    }

    private static Advisory CreateGhsaAdvisory()
    {
        var recorded = DateTimeOffset.Parse("2025-03-01T00:00:00Z");
        var provenance = new AdvisoryProvenance("ghsa", "map", "GHSA-aaaa-bbbb-cccc", recorded, new[] { ProvenanceFieldMasks.Advisory });
        return new Advisory(
            "GHSA-aaaa-bbbb-cccc",
            "Container escape",
            "Initial GHSA summary.",
            "en",
            recorded,
            recorded,
            "medium",
            exploitKnown: false,
            aliases: new[] { "CVE-2025-4242", "GHSA-aaaa-bbbb-cccc" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });
    }

    private static Advisory CreateNvdAdvisory()
    {
        var recorded = DateTimeOffset.Parse("2025-03-02T00:00:00Z");
        var provenance = new AdvisoryProvenance("nvd", "map", "CVE-2025-4242", recorded, new[] { ProvenanceFieldMasks.Advisory });
        return new Advisory(
            "CVE-2025-4242",
            "CVE-2025-4242",
            "Baseline NVD summary.",
            "en",
            recorded,
            recorded,
            "high",
            exploitKnown: false,
            aliases: new[] { "CVE-2025-4242" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });
    }

    private static Advisory CreateOsvAdvisory()
    {
        var recorded = DateTimeOffset.Parse("2025-03-05T12:00:00Z");
@@ -207,120 +207,119 @@ public sealed class AdvisoryMergeServiceTests
        Assert.Equal(conflict.ConflictId, appendedConflict.ConflictId);
        Assert.Equal(conflict.StatementIds, appendedConflict.StatementIds.ToImmutableArray());
    }

    private sealed class RecordingAdvisoryEventLog : IAdvisoryEventLog
    {
        public AdvisoryEventAppendRequest? LastRequest { get; private set; }

        public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
        {
            LastRequest = request;
            return ValueTask.CompletedTask;
        }

        public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
        {
            throw new NotSupportedException();
        }
    }

    private sealed class FakeAliasStore : IAliasStore
    {
        private readonly ConcurrentDictionary<string, List<AliasRecord>> _records = new(StringComparer.OrdinalIgnoreCase);

        public void Register(string advisoryKey, params (string Scheme, string Value)[] aliases)
        {
            var list = new List<AliasRecord>();
            foreach (var (scheme, value) in aliases)
            {
                list.Add(new AliasRecord(advisoryKey, scheme, value, DateTimeOffset.UtcNow));
            }

            _records[advisoryKey] = list;
        }

        public Task<AliasUpsertResult> ReplaceAsync(string advisoryKey, IEnumerable<AliasEntry> aliases, DateTimeOffset updatedAt, CancellationToken cancellationToken)
        {
            return Task.FromResult(new AliasUpsertResult(advisoryKey, Array.Empty<AliasCollision>()));
        }

        public Task<IReadOnlyList<AliasRecord>> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken)
        {
            var matches = _records.Values
                .SelectMany(static records => records)
                .Where(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase) && string.Equals(record.Value, value, StringComparison.OrdinalIgnoreCase))
                .ToList();

            return Task.FromResult<IReadOnlyList<AliasRecord>>(matches);
        }

        public Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken)
        {
            if (_records.TryGetValue(advisoryKey, out var records))
            {
                return Task.FromResult<IReadOnlyList<AliasRecord>>(records);
            }

            return Task.FromResult<IReadOnlyList<AliasRecord>>(Array.Empty<AliasRecord>());
        }
    }

    private sealed class FakeAdvisoryStore : IAdvisoryStore
    {
        private readonly ConcurrentDictionary<string, Advisory> _advisories = new(StringComparer.OrdinalIgnoreCase);

        public Advisory? LastUpserted { get; private set; }

        public void Seed(params Advisory[] advisories)
        {
            foreach (var advisory in advisories)
            {
                _advisories[advisory.AdvisoryKey] = advisory;
            }
        }

        public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null)
        {
            _ = session;
            _advisories.TryGetValue(advisoryKey, out var advisory);
            return Task.FromResult(advisory);
        }

        public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null)
        {
            _ = session;
            return Task.FromResult<IReadOnlyList<Advisory>>(Array.Empty<Advisory>());
        }

        public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null)
        {
            _ = session;
            _advisories[advisory.AdvisoryKey] = advisory;
            LastUpserted = advisory;
            return Task.CompletedTask;
        }

        public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null)
        {
            _ = session;
            return AsyncEnumerable.Empty<Advisory>();
        }
    }

    private sealed class InMemoryMergeEventStore : IMergeEventStore
    {
        public MergeEventRecord? LastRecord { get; private set; }

        public Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken)
        {
            LastRecord = record;
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<MergeEventRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken)
        {
            return Task.FromResult<IReadOnlyList<MergeEventRecord>>(Array.Empty<MergeEventRecord>());
        }
    }
}

    private sealed class RecordingAdvisoryEventLog : IAdvisoryEventLog
    {
        public AdvisoryEventAppendRequest? LastRequest { get; private set; }

        public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
        {
            LastRequest = request;
            return ValueTask.CompletedTask;
        }

        public ValueTask AttachStatementProvenanceAsync(Guid statementId, DsseProvenance provenance, TrustInfo trust, CancellationToken cancellationToken)
            => ValueTask.CompletedTask;

        public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
        {
            throw new NotSupportedException();
        }
    }

    private sealed class FakeAliasStore : IAliasStore
    {
        private readonly ConcurrentDictionary<string, List<AliasRecord>> _records = new(StringComparer.OrdinalIgnoreCase);

        public void Register(string advisoryKey, params (string Scheme, string Value)[] aliases)
        {
            var list = new List<AliasRecord>();
            foreach (var (scheme, value) in aliases)
            {
                list.Add(new AliasRecord(advisoryKey, scheme, value, DateTimeOffset.UtcNow));
            }

            _records[advisoryKey] = list;
        }

        public Task<AliasUpsertResult> ReplaceAsync(string advisoryKey, IEnumerable<AliasEntry> aliases, DateTimeOffset updatedAt, CancellationToken cancellationToken)
        {
            return Task.FromResult(new AliasUpsertResult(advisoryKey, Array.Empty<AliasCollision>()));
        }

        public Task<IReadOnlyList<AliasRecord>> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken)
        {
            var matches = _records.Values
                .SelectMany(static records => records)
                .Where(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase) && string.Equals(record.Value, value, StringComparison.OrdinalIgnoreCase))
                .ToList();

            return Task.FromResult<IReadOnlyList<AliasRecord>>(matches);
        }

        public Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken)
        {
            if (_records.TryGetValue(advisoryKey, out var records))
            {
                return Task.FromResult<IReadOnlyList<AliasRecord>>(records);
            }

            return Task.FromResult<IReadOnlyList<AliasRecord>>(Array.Empty<AliasRecord>());
        }
    }

    private sealed class FakeAdvisoryStore : IAdvisoryStore
    {
        private readonly ConcurrentDictionary<string, Advisory> _advisories = new(StringComparer.OrdinalIgnoreCase);

        public Advisory? LastUpserted { get; private set; }

        public void Seed(params Advisory[] advisories)
        {
            foreach (var advisory in advisories)
            {
                _advisories[advisory.AdvisoryKey] = advisory;
            }
        }

        public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
        {
            _advisories.TryGetValue(advisoryKey, out var advisory);
            return Task.FromResult(advisory);
        }

        public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken)
        {
            return Task.FromResult<IReadOnlyList<Advisory>>(Array.Empty<Advisory>());
        }

        public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken)
        {
            _advisories[advisory.AdvisoryKey] = advisory;
            LastUpserted = advisory;
            return Task.CompletedTask;
        }

        public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken)
        {
            return AsyncEnumerable.Empty<Advisory>();
        }
    }

    private sealed class InMemoryMergeEventStore : IMergeEventStore
    {
        public MergeEventRecord? LastRecord { get; private set; }

        public Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken)
        {
            LastRecord = record;
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<MergeEventRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken)
        {
            return Task.FromResult<IReadOnlyList<MergeEventRecord>>(Array.Empty<MergeEventRecord>());
        }
    }
}

Some files were not shown because too many files have changed in this diff.