feat: Add VEX compact fixture and implement offline verifier for Findings Ledger exports
- Introduced a new VEX compact fixture for testing purposes.
- Implemented `verify_export.py` script to validate Findings Ledger exports, ensuring deterministic ordering and applying redaction manifests.
- Added a lightweight stub `HarnessRunner` for unit tests to validate ledger hashing expectations.
- Documented tasks related to the Mirror Creator.
- Created models for entropy signals and implemented the `EntropyPenaltyCalculator` to compute penalties based on scanner outputs.
- Developed unit tests for `EntropyPenaltyCalculator` to ensure correct penalty calculations and handling of edge cases.
- Added tests for symbol ID normalization in the reachability scanner.
- Enhanced console status service with comprehensive unit tests for connection handling and error recovery.
- Included Cosign tool version 2.6.0 with checksums for various platforms.
@@ -5,18 +5,21 @@ Purpose: measure basic graph load/adjacency build and shallow path exploration o
## Fixtures
- Use interim synthetic fixtures under `samples/graph/interim/graph-50k` or `graph-100k`.
- Each fixture includes `nodes.ndjson`, `edges.ndjson`, and `manifest.json` with hashes/counts.
- Optional overlay: drop `overlay.ndjson` next to the fixture (or set `overlay.path` in `manifest.json`) to apply extra edges/layers; hashes are captured in results. A sample overlay record is shown below.
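For reference, an overlay file is plain NDJSON with one edge object per line. The shape below follows the format documented in the bench script's `apply_overlay` (`source`/`target`, with `from`/`to` accepted as aliases); the node ids are illustrative:

```json
{"source": "pkg-000001", "target": "pkg-000042"}
{"from": "pkg-000042", "to": "pkg-000007"}
```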

## Usage
```bash
python graph_bench.py \
  --fixture ../../../samples/graph/interim/graph-50k \
  --output results/graph-50k.json \
-  --samples 100
+  --samples 100 \
+  --overlay ../../../samples/graph/interim/graph-50k/overlay.ndjson  # optional
```

Outputs a JSON summary with:
- `nodes`, `edges`
- `build_ms` — time to build adjacency (ms)
- `overlay_ms` — time to apply overlay (0 when absent), plus counts and SHA under `overlay.*`
- `bfs_ms` — total time for 3-depth BFS over sampled nodes
- `avg_reach_3`, `max_reach_3` — nodes reached within depth 3
- `manifest` — copied from fixture for traceability
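For orientation, a results file has roughly the following shape. The key set comes from `run_bench` (keys sorted because the CLI writes with `sort_keys=True`); all numbers are illustrative, not measured results, and `manifest` simply echoes the fixture's `manifest.json` (elided here):

```json
{
  "avg_reach_3": 61.5,
  "bfs_ms": 410.07,
  "bfs_samples": 100,
  "build_ms": 812.44,
  "edges": 150100,
  "fixture": "graph-50k",
  "manifest": {},
  "max_reach_3": 412,
  "nodes": 50000,
  "overlay": {
    "added_edges": 100,
    "applied": true,
    "introduced_nodes": 0,
    "path": "/path/to/graph-50k/overlay.ndjson",
    "sha256": "<64-char hex digest>"
  },
  "overlay_ms": 35.2
}
```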
Binary file not shown.
@@ -9,10 +9,11 @@ no network, and fixed seeds for reproducibility.
from __future__ import annotations

import argparse
import hashlib
import json
import time
from pathlib import Path
-from typing import Dict, List, Tuple
+from typing import Dict, List, Optional, Tuple


def load_ndjson(path: Path):
@@ -42,6 +43,52 @@ def build_graph(nodes_path: Path, edges_path: Path) -> Tuple[Dict[str, List[str]
    return adjacency, edge_count


def _sha256(path: Path) -> str:
    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            h.update(chunk)
    return h.hexdigest()


def apply_overlay(adjacency: Dict[str, List[str]], overlay_path: Path) -> Tuple[int, int]:
    """
    Apply overlay edges to the adjacency map.

    Overlay file format (NDJSON): {"source": "nodeA", "target": "nodeB"}
    Unknown keys are ignored. New nodes are added with empty adjacency to keep
    BFS deterministic. Duplicate edges are de-duplicated.
    """

    if not overlay_path.exists():
        return 0, 0

    added_edges = 0
    introduced_nodes = set()
    for record in load_ndjson(overlay_path):
        source = record.get("source") or record.get("from")
        target = record.get("target") or record.get("to")
        if not source or not target:
            continue

        if source not in adjacency:
            adjacency[source] = []
            introduced_nodes.add(source)
        if target not in adjacency:
            adjacency[target] = []
            introduced_nodes.add(target)

        if target not in adjacency[source]:
            adjacency[source].append(target)
            added_edges += 1

    # keep neighbor ordering deterministic
    for v in adjacency.values():
        v.sort()

    return added_edges, len(introduced_nodes)


def bfs_limited(adjacency: Dict[str, List[str]], start: str, max_depth: int = 3) -> int:
    visited = {start}
    frontier = [start]
@@ -58,15 +105,41 @@ def bfs_limited(adjacency: Dict[str, List[str]], start: str, max_depth: int = 3)
    return len(visited)


-def run_bench(fixture_dir: Path, sample_size: int = 100) -> dict:
+def resolve_overlay_path(fixture_dir: Path, manifest: dict, explicit: Optional[Path]) -> Optional[Path]:
    if explicit:
        return explicit.resolve()

    overlay_manifest = manifest.get("overlay") if isinstance(manifest, dict) else None
    if isinstance(overlay_manifest, dict):
        path_value = overlay_manifest.get("path")
        if path_value:
            candidate = Path(path_value)
            return candidate if candidate.is_absolute() else (fixture_dir / candidate)

    default = fixture_dir / "overlay.ndjson"
    return default if default.exists() else None


+def run_bench(fixture_dir: Path, sample_size: int = 100, overlay_path: Optional[Path] = None) -> dict:
    nodes_path = fixture_dir / "nodes.ndjson"
    edges_path = fixture_dir / "edges.ndjson"
    manifest_path = fixture_dir / "manifest.json"

    manifest = json.loads(manifest_path.read_text()) if manifest_path.exists() else {}
    overlay_resolved = resolve_overlay_path(fixture_dir, manifest, overlay_path)

    t0 = time.perf_counter()
    adjacency, edge_count = build_graph(nodes_path, edges_path)
    overlay_added = 0
    overlay_nodes = 0
    overlay_hash = None
    overlay_ms = 0.0

    if overlay_resolved:
        t_overlay = time.perf_counter()
        overlay_added, overlay_nodes = apply_overlay(adjacency, overlay_resolved)
        overlay_ms = (time.perf_counter() - t_overlay) * 1000
        overlay_hash = _sha256(overlay_resolved)
    build_ms = (time.perf_counter() - t0) * 1000

    # deterministic sample: first N node ids sorted
@@ -83,13 +156,21 @@ def run_bench(fixture_dir: Path, sample_size: int = 100) -> dict:
    return {
        "fixture": fixture_dir.name,
        "nodes": len(adjacency),
-       "edges": edge_count,
+       "edges": edge_count + overlay_added,
        "build_ms": round(build_ms, 2),
+       "overlay_ms": round(overlay_ms, 2),
        "bfs_ms": round(bfs_ms, 2),
        "bfs_samples": len(node_ids),
        "avg_reach_3": round(avg_reach, 2),
        "max_reach_3": max_reach,
        "manifest": manifest,
+       "overlay": {
+           "applied": overlay_resolved is not None,
+           "added_edges": overlay_added,
+           "introduced_nodes": overlay_nodes,
+           "path": str(overlay_resolved) if overlay_resolved else None,
+           "sha256": overlay_hash,
+       },
    }
@@ -98,13 +179,15 @@ def main() -> int:
    parser.add_argument("--fixture", required=True, help="Path to fixture directory (nodes.ndjson, edges.ndjson)")
    parser.add_argument("--output", required=True, help="Path to write results JSON")
    parser.add_argument("--samples", type=int, default=100, help="Number of starting nodes to sample deterministically")
+   parser.add_argument("--overlay", help="Optional overlay NDJSON path; defaults to overlay.ndjson next to fixture or manifest overlay.path")
    args = parser.parse_args()

    fixture_dir = Path(args.fixture).resolve()
    out_path = Path(args.output).resolve()
    out_path.parent.mkdir(parents=True, exist_ok=True)

-   result = run_bench(fixture_dir, sample_size=args.samples)
+   explicit_overlay = Path(args.overlay).resolve() if args.overlay else None
+   result = run_bench(fixture_dir, sample_size=args.samples, overlay_path=explicit_overlay)
    out_path.write_text(json.dumps(result, indent=2, sort_keys=True))
    print(f"Wrote results to {out_path}")
    return 0
@@ -6,6 +6,7 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${ROOT}/../../../.." && pwd)"
FIXTURES_ROOT="${FIXTURES_ROOT:-${REPO_ROOT}/samples/graph/interim}"
OUT_DIR="${OUT_DIR:-$ROOT/results}"
+OVERLAY_ROOT="${OVERLAY_ROOT:-${FIXTURES_ROOT}}"
SAMPLES="${SAMPLES:-100}"

mkdir -p "${OUT_DIR}"
@@ -15,7 +16,14 @@ run_one() {
  local name
  name="$(basename "${fixture}")"
  local out_file="${OUT_DIR}/${name}.json"
- python "${ROOT}/graph_bench.py" --fixture "${fixture}" --output "${out_file}" --samples "${SAMPLES}"
+ local overlay_candidate="${OVERLAY_ROOT}/${name}/overlay.ndjson"
+
+ args=("--fixture" "${fixture}" "--output" "${out_file}" "--samples" "${SAMPLES}")
+ if [[ -f "${overlay_candidate}" ]]; then
+   args+=("--overlay" "${overlay_candidate}")
+ fi
+
+ python "${ROOT}/graph_bench.py" "${args[@]}"
}

run_one "${FIXTURES_ROOT}/graph-50k"

Binary file not shown.
src/Bench/StellaOps.Bench/Graph/tests/test_graph_bench.py (new file, 63 lines)
@@ -0,0 +1,63 @@
import json
import sys
import tempfile
from pathlib import Path

import unittest

ROOT = Path(__file__).resolve().parents[1]
if str(ROOT) not in sys.path:
    sys.path.insert(0, str(ROOT))


class GraphBenchTests(unittest.TestCase):
    def setUp(self) -> None:
        self.tmp = tempfile.TemporaryDirectory()
        self.root = Path(self.tmp.name)

    def tearDown(self) -> None:
        self.tmp.cleanup()

    def _write_ndjson(self, path: Path, records):
        path.parent.mkdir(parents=True, exist_ok=True)
        with path.open("w", encoding="utf-8") as f:
            for record in records:
                f.write(json.dumps(record))
                f.write("\n")

    def test_overlay_edges_are_applied_and_counted(self):
        from graph_bench import run_bench

        fixture = self.root / "fixture"
        fixture.mkdir()

        self._write_ndjson(fixture / "nodes.ndjson", [{"id": "a"}, {"id": "b"}])
        self._write_ndjson(fixture / "edges.ndjson", [{"source": "a", "target": "b"}])
        self._write_ndjson(fixture / "overlay.ndjson", [{"source": "b", "target": "a"}])

        result = run_bench(fixture, sample_size=2)

        self.assertEqual(result["nodes"], 2)
        self.assertEqual(result["edges"], 2)  # overlay added one edge
        self.assertTrue(result["overlay"]["applied"])
        self.assertEqual(result["overlay"]["added_edges"], 1)
        self.assertEqual(result["overlay"]["introduced_nodes"], 0)

    def test_overlay_is_optional(self):
        from graph_bench import run_bench

        fixture = self.root / "fixture-no-overlay"
        fixture.mkdir()

        self._write_ndjson(fixture / "nodes.ndjson", [{"id": "x"}, {"id": "y"}])
        self._write_ndjson(fixture / "edges.ndjson", [{"source": "x", "target": "y"}])

        result = run_bench(fixture, sample_size=2)

        self.assertEqual(result["edges"], 1)
        self.assertFalse(result["overlay"]["applied"])
        self.assertEqual(result["overlay"]["added_edges"], 0)


if __name__ == "__main__":
    unittest.main()
@@ -7,18 +7,57 @@
 */
import fs from "fs";
import path from "path";
import crypto from "crypto";

function readJson(p) {
  return JSON.parse(fs.readFileSync(p, "utf-8"));
}

-function buildPlan(scenarios, manifest, fixtureName) {
+function sha256File(filePath) {
+  const hash = crypto.createHash("sha256");
+  hash.update(fs.readFileSync(filePath));
+  return hash.digest("hex");
+}
+
+function resolveOverlay(fixtureDir, manifest) {
+  const manifestOverlay = manifest?.overlay?.path;
+  const candidate = manifestOverlay
+    ? path.isAbsolute(manifestOverlay)
+      ? manifestOverlay
+      : path.join(fixtureDir, manifestOverlay)
+    : path.join(fixtureDir, "overlay.ndjson");
+
+  if (!fs.existsSync(candidate)) {
+    return null;
+  }
+
+  return {
+    path: candidate,
+    sha256: sha256File(candidate),
+  };
+}
+
+function buildPlan(scenarios, manifest, fixtureName, fixtureDir) {
  const now = new Date().toISOString();
  const seed = process.env.UI_BENCH_SEED || "424242";
  const traceId =
    process.env.UI_BENCH_TRACE_ID ||
    (crypto.randomUUID ? crypto.randomUUID() : `trace-${Date.now()}`);
+  const overlay = resolveOverlay(fixtureDir, manifest);

  return {
    version: "1.0.0",
    fixture: fixtureName,
    manifestHash: manifest?.hashes || {},
+    overlay,
    timestamp: now,
    seed,
    traceId,
    viewport: {
      width: 1280,
      height: 720,
      deviceScaleFactor: 1,
    },
    steps: scenarios.map((s, idx) => ({
      order: idx + 1,
      id: s.id,
@@ -41,7 +80,12 @@ function main() {
  const manifest = fs.existsSync(manifestPath) ? readJson(manifestPath) : {};
  const scenarios = readJson(scenariosPath).scenarios || [];

-  const plan = buildPlan(scenarios, manifest, path.basename(fixtureDir));
+  const plan = buildPlan(
+    scenarios,
+    manifest,
+    path.basename(fixtureDir),
+    fixtureDir
+  );
  fs.mkdirSync(path.dirname(outputPath), { recursive: true });
  fs.writeFileSync(outputPath, JSON.stringify(plan, null, 2));
  console.log(`Wrote plan to ${outputPath}`);

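A generated `plan.json` looks roughly like this. The field set comes from `buildPlan` and the assertions in the test below; values are illustrative, and each `steps` entry carries at least `order` and `id` (the rest of the mapping falls outside this hunk):

```json
{
  "version": "1.0.0",
  "fixture": "graph-50k",
  "manifestHash": { "nodes": "abc" },
  "overlay": {
    "path": "/path/to/fixture/overlay.ndjson",
    "sha256": "<64-char hex digest>"
  },
  "timestamp": "2025-12-02T00:00:00.000Z",
  "seed": "424242",
  "traceId": "trace-test",
  "viewport": { "width": 1280, "height": 720, "deviceScaleFactor": 1 },
  "steps": [
    { "order": 1, "id": "load" }
  ]
}
```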
src/Bench/StellaOps.Bench/Graph/ui_bench_driver.test.mjs (new file, 42 lines)
@@ -0,0 +1,42 @@
import assert from "node:assert";
import { test } from "node:test";
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { spawnSync } from "node:child_process";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

test("ui bench driver emits overlay + seed metadata", () => {
  const tmp = fs.mkdtempSync(path.join(process.cwd(), "tmp-ui-bench-"));
  const fixtureDir = path.join(tmp, "fixture");
  fs.mkdirSync(fixtureDir, { recursive: true });

  // minimal fixture files
  fs.writeFileSync(path.join(fixtureDir, "manifest.json"), JSON.stringify({ hashes: { nodes: "abc" } }));
  fs.writeFileSync(path.join(fixtureDir, "overlay.ndjson"), "{\"source\":\"a\",\"target\":\"b\"}\n");

  const scenariosPath = path.join(tmp, "scenarios.json");
  fs.writeFileSync(
    scenariosPath,
    JSON.stringify({ version: "1.0.0", scenarios: [{ id: "load", name: "Load", steps: ["navigate"] }] })
  );

  const outputPath = path.join(tmp, "plan.json");
  const env = { ...process.env, UI_BENCH_SEED: "1337", UI_BENCH_TRACE_ID: "trace-test" };
  const driverPath = path.join(__dirname, "ui_bench_driver.mjs");
  const result = spawnSync(process.execPath, [driverPath, fixtureDir, scenariosPath, outputPath], { env });
  assert.strictEqual(result.status, 0, result.stderr?.toString());

  const plan = JSON.parse(fs.readFileSync(outputPath, "utf-8"));
  assert.strictEqual(plan.fixture, "fixture");
  assert.strictEqual(plan.seed, "1337");
  assert.strictEqual(plan.traceId, "trace-test");
  assert.ok(plan.overlay);
  assert.ok(plan.overlay.path.endsWith("overlay.ndjson"));
  assert.ok(plan.overlay.sha256);
  assert.deepStrictEqual(plan.viewport, { width: 1280, height: 720, deviceScaleFactor: 1 });

  fs.rmSync(tmp, { recursive: true, force: true });
});
@@ -4,6 +4,7 @@ Purpose: provide a deterministic, headless flow for measuring graph UI interacti

## Scope
- Use synthetic fixtures under `samples/graph/interim/` until canonical SAMPLES-GRAPH-24-003 lands.
+- Optional overlay layer (`overlay.ndjson`) is loaded when present and toggled during the run to capture render/merge overhead.
- Drive a deterministic sequence of interactions:
  1) Load graph canvas with specified fixture.
  2) Pan to node `pkg-000001`.
@@ -11,7 +12,7 @@ Purpose: provide a deterministic, headless flow for measuring graph UI interacti
  4) Apply filter `name contains "package-0001"`.
  5) Select node, expand neighbors (depth 1), collapse.
  6) Toggle overlay layer (once available).
-- Capture timings: initial render, filter apply, expand/collapse, overlay toggle.
+- Capture timings: initial render, filter apply, expand/collapse, overlay toggle (when available).

## Determinism rules
- Fixed seed for any randomized layouts (seed `424242`).

@@ -2,4 +2,4 @@

### Unreleased

-No analyzer rules currently scheduled for release.
+- CONCELIER0004: Flag direct `new HttpClient()` usage inside `StellaOps.Concelier.Connector*` namespaces; require sandboxed `IHttpClientFactory` to enforce allow/deny lists.
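To illustrate what CONCELIER0004 flags, here is a minimal, hypothetical connector sketch; only the namespace prefix and the `new HttpClient()` expression matter to the analyzer implemented below:

```csharp
using System.Net.Http;

// Hypothetical example type; the namespace prefix is what the analyzer keys on.
namespace StellaOps.Concelier.Connector.Example;

public sealed class ExampleConnector
{
    private readonly IHttpClientFactory _httpClientFactory;

    public ExampleConnector(IHttpClientFactory httpClientFactory)
        => _httpClientFactory = httpClientFactory;

    // CONCELIER0004: direct construction bypasses allowlist/denylist and proxy policies.
    public HttpClient Bad() => new HttpClient();

    // OK: the injected factory hands out sandboxed handlers.
    public HttpClient Good() => _httpClientFactory.CreateClient("concelier");
}
```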
@@ -0,0 +1,53 @@
using System.Collections.Immutable;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;

namespace StellaOps.Concelier.Analyzers;

[DiagnosticAnalyzer(LanguageNames.CSharp)]
public sealed class ConnectorHttpClientSandboxAnalyzer : DiagnosticAnalyzer
{
    public const string DiagnosticId = "CONCELIER0004";

    private static readonly DiagnosticDescriptor Rule = new(
        id: DiagnosticId,
        title: "Connector HTTP clients must use sandboxed factory",
        messageFormat: "Use IHttpClientFactory or connector sandbox helpers instead of 'new HttpClient()' inside Concelier connectors.",
        category: "Sandbox",
        defaultSeverity: DiagnosticSeverity.Warning,
        isEnabledByDefault: true,
        description: "Direct HttpClient construction bypasses connector allowlist/denylist and proxy policies. Use IHttpClientFactory or sandboxed handlers.");

    public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(Rule);

    public override void Initialize(AnalysisContext context)
    {
        context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
        context.EnableConcurrentExecution();
        context.RegisterSyntaxNodeAction(AnalyzeObjectCreation, SyntaxKind.ObjectCreationExpression);
    }

    private static void AnalyzeObjectCreation(SyntaxNodeAnalysisContext context)
    {
        if (context.Node is not ObjectCreationExpressionSyntax objectCreation)
        {
            return;
        }

        var type = context.SemanticModel.GetTypeInfo(objectCreation, context.CancellationToken).Type;
        if (type?.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat) != "global::System.Net.Http.HttpClient")
        {
            return;
        }

        var containingSymbol = context.ContainingSymbol?.ContainingNamespace?.ToDisplayString();
        if (containingSymbol is null || !containingSymbol.StartsWith("StellaOps.Concelier.Connector"))
        {
            return;
        }

        context.ReportDiagnostic(Diagnostic.Create(Rule, objectCreation.GetLocation()));
    }
}
@@ -1,3 +1,5 @@
using System;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Models.Observations;

@@ -29,6 +31,24 @@ public sealed class AdvisoryObservationWriteGuard : IAdvisoryObservationWriteGua
        ArgumentNullException.ThrowIfNull(observation);

        var newContentHash = observation.Upstream.ContentHash;
        var signature = observation.Upstream.Signature;

        if (!IsSha256(newContentHash))
        {
            _logger.LogWarning(
                "Observation {ObservationId} rejected: content hash must be canonical sha256:<hex64> but was {ContentHash}",
                observation.ObservationId,
                newContentHash);
            return ObservationWriteDisposition.RejectInvalidProvenance;
        }

        if (!SignatureShapeIsValid(signature))
        {
            _logger.LogWarning(
                "Observation {ObservationId} rejected: signature metadata missing or inconsistent for provenance enforcement",
                observation.ObservationId);
            return ObservationWriteDisposition.RejectInvalidProvenance;
        }

        if (string.IsNullOrWhiteSpace(existingContentHash))
        {
@@ -56,4 +76,36 @@ public sealed class AdvisoryObservationWriteGuard : IAdvisoryObservationWriteGua

        return ObservationWriteDisposition.RejectMutation;
    }

    private static bool IsSha256(string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        return value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
            && value.Length == "sha256:".Length + 64
            && value["sha256:".Length..].All(c => Uri.IsHexDigit(c));
    }

    private static bool SignatureShapeIsValid(AdvisoryObservationSignature signature)
    {
        if (signature is null)
        {
            return false;
        }

        if (signature.Present)
        {
            return !string.IsNullOrWhiteSpace(signature.Format)
                && !string.IsNullOrWhiteSpace(signature.KeyId)
                && !string.IsNullOrWhiteSpace(signature.Signature);
        }

        // When signature is not present, auxiliary fields must be empty to prevent stale metadata.
        return string.IsNullOrEmpty(signature.Format)
            && string.IsNullOrEmpty(signature.KeyId)
            && string.IsNullOrEmpty(signature.Signature);
    }
}

@@ -32,6 +32,11 @@ public enum ObservationWriteDisposition
    /// </summary>
    SkipIdentical,

    /// <summary>
    /// Observation is malformed (missing provenance/signature/hash guarantees) and must be rejected.
    /// </summary>
    RejectInvalidProvenance,

    /// <summary>
    /// Observation differs from existing - reject mutation (append-only violation).
    /// </summary>

@@ -15,6 +15,10 @@ namespace StellaOps.Concelier.Core.Tests.Aoc;
/// </summary>
public sealed class AdvisoryObservationWriteGuardTests
{
    private const string HashA = "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
    private const string HashB = "sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";
    private const string HashC = "sha256:cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc";

    private readonly AdvisoryObservationWriteGuard _guard;

    public AdvisoryObservationWriteGuardTests()
@@ -26,7 +30,7 @@ public sealed class AdvisoryObservationWriteGuardTests
    public void ValidateWrite_NewObservation_ReturnsProceed()
    {
        // Arrange
-       var observation = CreateObservation("obs-1", "sha256:abc123");
+       var observation = CreateObservation("obs-1", HashA);

        // Act
        var result = _guard.ValidateWrite(observation, existingContentHash: null);
@@ -39,7 +43,7 @@ public sealed class AdvisoryObservationWriteGuardTests
    public void ValidateWrite_NewObservation_WithEmptyExistingHash_ReturnsProceed()
    {
        // Arrange
-       var observation = CreateObservation("obs-2", "sha256:def456");
+       var observation = CreateObservation("obs-2", HashB);

        // Act
        var result = _guard.ValidateWrite(observation, existingContentHash: "");
@@ -52,7 +56,7 @@ public sealed class AdvisoryObservationWriteGuardTests
    public void ValidateWrite_NewObservation_WithWhitespaceExistingHash_ReturnsProceed()
    {
        // Arrange
-       var observation = CreateObservation("obs-3", "sha256:ghi789");
+       var observation = CreateObservation("obs-3", HashC);

        // Act
        var result = _guard.ValidateWrite(observation, existingContentHash: " ");
@@ -65,11 +69,10 @@ public sealed class AdvisoryObservationWriteGuardTests
    public void ValidateWrite_IdenticalContent_ReturnsSkipIdentical()
    {
        // Arrange
-       const string contentHash = "sha256:abc123";
-       var observation = CreateObservation("obs-4", contentHash);
+       var observation = CreateObservation("obs-4", HashA);

        // Act
-       var result = _guard.ValidateWrite(observation, existingContentHash: contentHash);
+       var result = _guard.ValidateWrite(observation, existingContentHash: HashA);

        // Assert
        result.Should().Be(ObservationWriteDisposition.SkipIdentical);
@@ -79,10 +82,10 @@ public sealed class AdvisoryObservationWriteGuardTests
    public void ValidateWrite_IdenticalContent_CaseInsensitive_ReturnsSkipIdentical()
    {
        // Arrange
-       var observation = CreateObservation("obs-5", "SHA256:ABC123");
+       var observation = CreateObservation("obs-5", "SHA256:AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA");

        // Act
-       var result = _guard.ValidateWrite(observation, existingContentHash: "sha256:abc123");
+       var result = _guard.ValidateWrite(observation, existingContentHash: HashA);

        // Assert
        result.Should().Be(ObservationWriteDisposition.SkipIdentical);
@@ -92,10 +95,10 @@ public sealed class AdvisoryObservationWriteGuardTests
    public void ValidateWrite_DifferentContent_ReturnsRejectMutation()
    {
        // Arrange
-       var observation = CreateObservation("obs-6", "sha256:newcontent");
+       var observation = CreateObservation("obs-6", HashB);

        // Act
-       var result = _guard.ValidateWrite(observation, existingContentHash: "sha256:oldcontent");
+       var result = _guard.ValidateWrite(observation, existingContentHash: HashA);

        // Assert
        result.Should().Be(ObservationWriteDisposition.RejectMutation);
@@ -113,9 +116,8 @@ public sealed class AdvisoryObservationWriteGuardTests
    }

    [Theory]
-   [InlineData("sha256:a", "sha256:b")]
-   [InlineData("sha256:hash1", "sha256:hash2")]
-   [InlineData("md5:abc", "sha256:abc")]
+   [InlineData(HashB, HashC)]
+   [InlineData(HashC, HashA)]
    public void ValidateWrite_ContentMismatch_ReturnsRejectMutation(string newHash, string existingHash)
    {
        // Arrange
@@ -129,9 +131,8 @@ public sealed class AdvisoryObservationWriteGuardTests
    }

    [Theory]
-   [InlineData("sha256:identical")]
-   [InlineData("SHA256:IDENTICAL")]
-   [InlineData("sha512:longerhash1234567890")]
+   [InlineData(HashA)]
+   [InlineData("SHA256:BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB")]
    public void ValidateWrite_ExactMatch_ReturnsSkipIdentical(string hash)
    {
        // Arrange
@@ -144,7 +145,41 @@ public sealed class AdvisoryObservationWriteGuardTests
        result.Should().Be(ObservationWriteDisposition.SkipIdentical);
    }

-   private static AdvisoryObservation CreateObservation(string observationId, string contentHash)
+   [Theory]
+   [InlineData("md5:abc")]
+   [InlineData("sha256:short")]
+   public void ValidateWrite_InvalidHash_ReturnsRejectInvalidProvenance(string hash)
+   {
+       var observation = CreateObservation("obs-invalid-hash", hash);
+
+       var result = _guard.ValidateWrite(observation, existingContentHash: null);
+
+       result.Should().Be(ObservationWriteDisposition.RejectInvalidProvenance);
+   }
+
+   [Fact]
+   public void ValidateWrite_SignaturePresentMissingFields_ReturnsRejectInvalidProvenance()
+   {
+       var badSignature = new AdvisoryObservationSignature(true, null, null, null);
+       var observation = CreateObservation("obs-bad-sig", HashA, badSignature);
+
+       var result = _guard.ValidateWrite(observation, existingContentHash: null);
+
+       result.Should().Be(ObservationWriteDisposition.RejectInvalidProvenance);
+   }
+
+   [Fact]
+   public void Observation_TenantIsLowercased()
+   {
+       var observation = CreateObservation("obs-tenant", HashA, tenant: "Tenant:Mixed");
+       observation.Tenant.Should().Be("tenant:mixed");
+   }
+
+   private static AdvisoryObservation CreateObservation(
+       string observationId,
+       string contentHash,
+       AdvisoryObservationSignature? signatureOverride = null,
+       string tenant = "test-tenant")
    {
        var source = new AdvisoryObservationSource(
            vendor: "test-vendor",
@@ -152,11 +187,11 @@ public sealed class AdvisoryObservationWriteGuardTests
            api: "test-api",
            collectorVersion: "1.0.0");

-       var signature = new AdvisoryObservationSignature(
-           present: false,
-           format: null,
-           keyId: null,
-           signature: null);
+       var signature = signatureOverride ?? new AdvisoryObservationSignature(
+           present: true,
+           format: "dsse",
+           keyId: "test-key",
+           signature: "ZmFrZS1zaWc=");

        var upstream = new AdvisoryObservationUpstream(
            upstreamId: $"upstream-{observationId}",
@@ -184,7 +219,7 @@ public sealed class AdvisoryObservationWriteGuardTests

        return new AdvisoryObservation(
            observationId: observationId,
-           tenant: "test-tenant",
+           tenant: tenant,
            source: source,
            upstream: upstream,
            content: content,

@@ -0,0 +1,86 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Concelier.Models;
using Xunit;

namespace StellaOps.Concelier.Core.Tests.Linksets;

/// <summary>
/// Determinism and provenance-focused tests aligned with CI1–CI10 gap remediation.
/// </summary>
public sealed class AdvisoryLinksetDeterminismTests
{
    [Fact]
    public void IdempotencyKey_IsStableAcrossObservationOrdering()
    {
        // Arrange
        var createdAt = new DateTimeOffset(2025, 12, 2, 0, 0, 0, TimeSpan.Zero);
        var observationsA = ImmutableArray.Create("obs-b", "obs-a");
        var observationsB = ImmutableArray.Create("obs-a", "obs-b");

        var linksetA = new AdvisoryLinkset(
            TenantId: "tenant-a",
            Source: "nvd",
            AdvisoryId: "CVE-2025-9999",
            ObservationIds: observationsA,
            Normalized: null,
            Provenance: new AdvisoryLinksetProvenance(
                ObservationHashes: new[] { "sha256:1111", "sha256:2222" },
                ToolVersion: "1.0.0",
                PolicyHash: "policy-hash-1"),
            Confidence: 0.8,
            Conflicts: null,
            CreatedAt: createdAt,
            BuiltByJobId: "job-1");

        var linksetB = linksetA with { ObservationIds = observationsB };

        // Act
        var evtA = AdvisoryLinksetUpdatedEvent.FromLinkset(linksetA, null, "linkset-1", null);
        var evtB = AdvisoryLinksetUpdatedEvent.FromLinkset(linksetB, null, "linkset-1", null);

        // Assert
        evtA.IdempotencyKey.Should().Be(evtB.IdempotencyKey);
    }

    [Fact]
    public void Conflicts_AreDeterministicallyDedupedAndSourcesFilled()
    {
        // Arrange
        var inputs = new[]
        {
            new LinksetCorrelation.Input(
                Vendor: "nvd",
                FetchedAt: DateTimeOffset.Parse("2025-12-01T00:00:00Z"),
                Aliases: new[] { "CVE-2025-1111" },
                Purls: Array.Empty<string>(),
                Cpes: Array.Empty<string>(),
                References: Array.Empty<string>()),
            new LinksetCorrelation.Input(
                Vendor: "vendor",
                FetchedAt: DateTimeOffset.Parse("2025-12-01T00:05:00Z"),
                Aliases: new[] { "CVE-2025-2222" },
                Purls: Array.Empty<string>(),
                Cpes: Array.Empty<string>(),
                References: Array.Empty<string>())
        };

        var duplicateConflicts = new List<AdvisoryLinksetConflict>
        {
            new("aliases", "alias-inconsistency", new[] { "nvd:CVE-2025-1111", "vendor:CVE-2025-2222" }, null),
            new("aliases", "alias-inconsistency", new[] { "nvd:CVE-2025-1111", "vendor:CVE-2025-2222" }, Array.Empty<string>())
        };

        // Act
        var (_, conflicts) = LinksetCorrelation.Compute(inputs, duplicateConflicts);

        // Assert
        conflicts.Should().HaveCount(1);
        conflicts[0].Field.Should().Be("aliases");
        conflicts[0].Reason.Should().Be("alias-inconsistency");
        conflicts[0].SourceIds.Should().ContainInOrder("nvd", "vendor");
    }
}
@@ -0,0 +1,86 @@
using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text.Json;
using FluentAssertions;
using Xunit;

namespace StellaOps.Concelier.Core.Tests.Schemas;

/// <summary>
/// Verifies schema bundle digests and offline bundle sample constraints (CI1–CI10).
/// </summary>
public sealed class SchemaManifestTests
{
    private static readonly string SchemaRoot = ResolveSchemaRoot();

    [Fact]
    public void SchemaManifest_DigestsMatchFilesystem()
    {
        var manifestPath = Path.Combine(SchemaRoot, "schema.manifest.json");
        using var doc = JsonDocument.Parse(File.ReadAllText(manifestPath));

        var files = doc.RootElement.GetProperty("files").EnumerateArray().ToArray();
        files.Should().NotBeEmpty("schema manifest must contain at least one entry");

        foreach (var fileEl in files)
        {
            var path = fileEl.GetProperty("path").GetString()!;
            var expected = fileEl.GetProperty("sha256").GetString()!;

            var fullPath = Path.Combine(SchemaRoot, path);
            File.Exists(fullPath).Should().BeTrue($"manifest entry {path} should exist");

            var actual = ComputeSha256(fullPath);
            actual.Should().Be(expected, $"digest for {path} should be canonical");
        }
    }

    [Fact]
    public void OfflineBundleSample_RespectsStalenessAndHashes()
    {
        var samplePath = Path.Combine(SchemaRoot, "samples/offline-advisory-bundle.sample.json");
        using var doc = JsonDocument.Parse(File.ReadAllText(samplePath));

        var snapshot = doc.RootElement.GetProperty("snapshot");
        var staleness = snapshot.GetProperty("stalenessHours").GetInt32();
        staleness.Should().BeLessOrEqualTo(168, "offline bundles must cap snapshot staleness to 7 days");

        var manifest = doc.RootElement.GetProperty("manifest").EnumerateArray().ToArray();
        manifest.Should().NotBeEmpty();
        foreach (var entry in manifest)
        {
            var hash = entry.GetProperty("sha256").GetString()!;
            hash.Length.Should().Be(64);
        }

        var hashes = doc.RootElement.GetProperty("hashes");
        hashes.GetProperty("sha256").GetString()!.Length.Should().Be(64);
    }

    private static string ComputeSha256(string path)
    {
        using var sha = SHA256.Create();
        using var stream = File.OpenRead(path);
        var hash = sha.ComputeHash(stream);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    private static string ResolveSchemaRoot()
    {
        var current = AppContext.BaseDirectory;
        while (!string.IsNullOrEmpty(current))
        {
            var candidate = Path.Combine(current, "docs/modules/concelier/schemas");
            if (Directory.Exists(candidate))
            {
                return candidate;
            }

            current = Directory.GetParent(current)?.FullName;
        }

        throw new DirectoryNotFoundException("Unable to locate docs/modules/concelier/schemas from test base directory.");
    }
}
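The manifest shape these tests assume can be sketched as follows; the first `path` entry and all digests are placeholders, and only the `files[].path`/`files[].sha256` structure is actually asserted:

```json
{
  "files": [
    { "path": "export.v1.schema.json", "sha256": "<64-char lowercase hex>" },
    { "path": "samples/offline-advisory-bundle.sample.json", "sha256": "<64-char lowercase hex>" }
  ]
}
```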
@@ -25,6 +25,7 @@ using StellaOps.Findings.Ledger.WebService.Mappings;
using StellaOps.Findings.Ledger.WebService.Services;
using StellaOps.Telemetry.Core;
using StellaOps.Findings.Ledger.Services.Security;
using StellaOps.Findings.Ledger.Observability;

const string LedgerWritePolicy = "ledger.events.write";
const string LedgerExportPolicy = "ledger.export.read";
@@ -45,6 +46,8 @@ var bootstrapOptions = builder.Configuration.BindOptions<LedgerServiceOptions>(
    LedgerServiceOptions.SectionName,
    (opts, _) => opts.Validate());

LedgerMetrics.ConfigureQuotas(bootstrapOptions.Quotas.MaxIngestBacklog);

builder.Host.UseSerilog((context, services, loggerConfiguration) =>
{
    loggerConfiguration

@@ -21,7 +21,7 @@ public sealed class LedgerAnchorQueue
    public ValueTask EnqueueAsync(LedgerEventRecord record, CancellationToken cancellationToken)
    {
        var writeTask = _channel.Writer.WriteAsync(record, cancellationToken);
-       LedgerMetrics.IncrementBacklog();
+       LedgerMetrics.IncrementBacklog(record.TenantId);
        return writeTask;
    }

@@ -37,7 +37,7 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
    {
        await foreach (var record in _queue.ReadAllAsync(stoppingToken))
        {
-           LedgerMetrics.DecrementBacklog();
+           LedgerMetrics.DecrementBacklog(record.TenantId);
            await HandleEventAsync(record, stoppingToken).ConfigureAwait(false);
        }
    }

@@ -1,5 +1,7 @@
using System;
using System.Collections.Concurrent;
using System.Diagnostics.Metrics;
using System.Reflection;

namespace StellaOps.Findings.Ledger.Observability;

@@ -22,6 +24,14 @@ internal static class LedgerMetrics
        "ledger_events_total",
        description: "Number of ledger events appended.");

    private static readonly Counter<long> BackpressureApplied = Meter.CreateCounter<long>(
        "ledger_backpressure_applied_total",
        description: "Times ingest backpressure thresholds were exceeded.");

    private static readonly Counter<long> QuotaRejections = Meter.CreateCounter<long>(
        "ledger_quota_rejections_total",
        description: "Requests rejected due to configured quotas.");

    private static readonly Histogram<double> ProjectionApplySeconds = Meter.CreateHistogram<double>(
        "ledger_projection_apply_seconds",
        unit: "s",
@@ -45,21 +55,38 @@ internal static class LedgerMetrics
        "ledger_merkle_anchor_failures_total",
        description: "Count of Merkle anchor failures by reason.");

    private static readonly Counter<long> AttachmentsEncryptionFailures = Meter.CreateCounter<long>(
        "ledger_attachments_encryption_failures_total",
        description: "Count of attachment encryption/signing/upload failures.");

    private static readonly ObservableGauge<double> ProjectionLagGauge =
        Meter.CreateObservableGauge("ledger_projection_lag_seconds", ObserveProjectionLag, unit: "s",
            description: "Lag between ledger recorded_at and projection application time.");

    private static readonly ObservableGauge<long> IngestBacklogGauge =
        Meter.CreateObservableGauge("ledger_ingest_backlog_events", ObserveBacklog,
-           description: "Number of events buffered for ingestion/anchoring.");
+           description: "Number of events buffered for ingestion/anchoring per tenant.");

    private static readonly ObservableGauge<long> QuotaRemainingGauge =
        Meter.CreateObservableGauge("ledger_quota_remaining", ObserveQuotaRemaining,
            description: "Remaining ingest backlog capacity before backpressure applies.");

    private static readonly ObservableGauge<long> DbConnectionsGauge =
        Meter.CreateObservableGauge("ledger_db_connections_active", ObserveDbConnections,
            description: "Active PostgreSQL connections by role.");

    private static readonly ObservableGauge<long> AppVersionGauge =
        Meter.CreateObservableGauge("ledger_app_version_info", ObserveAppVersion,
            description: "Static gauge exposing build version and git sha.");

    private static readonly ConcurrentDictionary<string, double> ProjectionLagByTenant = new(StringComparer.Ordinal);
    private static readonly ConcurrentDictionary<string, long> DbConnectionsByRole = new(StringComparer.OrdinalIgnoreCase);
-   private static long _ingestBacklog;
+   private static readonly ConcurrentDictionary<string, long> BacklogByTenant = new(StringComparer.Ordinal);

    private static long _ingestBacklogLimit = 5000;

    private static readonly string AppVersion = Assembly.GetExecutingAssembly().GetName().Version?.ToString() ?? "0.0.0";
    private static readonly string GitSha = Environment.GetEnvironmentVariable("GIT_SHA") ?? "unknown";

    public static void RecordWriteSuccess(TimeSpan duration, string? tenantId, string? eventType, string? source)
    {
@@ -127,17 +154,55 @@ internal static class LedgerMetrics
        MerkleAnchorFailures.Add(1, tags);
    }

-   public static void IncrementBacklog() => Interlocked.Increment(ref _ingestBacklog);
-
-   public static void DecrementBacklog()
-   {
-       var value = Interlocked.Decrement(ref _ingestBacklog);
-       if (value < 0)
-       {
-           Interlocked.Exchange(ref _ingestBacklog, 0);
-       }
-   }
+   public static void RecordAttachmentFailure(string tenantId, string stage)
+   {
+       var tags = new KeyValuePair<string, object?>[]
+       {
+           new("tenant", tenantId),
+           new("stage", stage)
+       };
+       AttachmentsEncryptionFailures.Add(1, tags);
+   }

    public static void ConfigureQuotas(long ingestBacklogLimit)
    {
        if (ingestBacklogLimit > 0)
        {
            Interlocked.Exchange(ref _ingestBacklogLimit, ingestBacklogLimit);
        }
    }

    public static long IncrementBacklog(string? tenantId = null)
    {
        var key = NormalizeTenant(tenantId);
        var backlog = BacklogByTenant.AddOrUpdate(key, _ => 1, (_, current) => current + 1);
        if (backlog > _ingestBacklogLimit)
        {
            BackpressureApplied.Add(1, new KeyValuePair<string, object?>[]
            {
                new("tenant", key),
                new("reason", "ingest_backlog"),
                new("limit", _ingestBacklogLimit)
            });
        }
        return backlog;
    }

    public static void RecordQuotaRejection(string tenantId, string reason)
    {
        QuotaRejections.Add(1, new KeyValuePair<string, object?>[]
        {
            new("tenant", NormalizeTenant(tenantId)),
            new("reason", reason)
        });
    }

    public static void DecrementBacklog(string? tenantId = null)
    {
        var key = NormalizeTenant(tenantId);
        BacklogByTenant.AddOrUpdate(key, _ => 0, (_, current) => Math.Max(0, current - 1));
    }

    public static void ConnectionOpened(string role)
    {
        var normalized = NormalizeRole(role);
@@ -150,12 +215,19 @@ internal static class LedgerMetrics
        DbConnectionsByRole.AddOrUpdate(normalized, _ => 0, (_, current) => Math.Max(0, current - 1));
    }

    public static void IncrementDbConnection(string role) => ConnectionOpened(role);

    public static void DecrementDbConnection(string role) => ConnectionClosed(role);

    public static void UpdateProjectionLag(string? tenantId, double lagSeconds)
    {
        var key = string.IsNullOrWhiteSpace(tenantId) ? string.Empty : tenantId;
        ProjectionLagByTenant[key] = lagSeconds < 0 ? 0 : lagSeconds;
    }

    public static void RecordProjectionLag(TimeSpan lag, string? tenantId) =>
        UpdateProjectionLag(tenantId, lag.TotalSeconds);

    private static IEnumerable<Measurement<double>> ObserveProjectionLag()
    {
        foreach (var kvp in ProjectionLagByTenant)
@@ -166,7 +238,19 @@ internal static class LedgerMetrics

    private static IEnumerable<Measurement<long>> ObserveBacklog()
    {
-       yield return new Measurement<long>(Interlocked.Read(ref _ingestBacklog));
+       foreach (var kvp in BacklogByTenant)
+       {
+           yield return new Measurement<long>(kvp.Value, new KeyValuePair<string, object?>("tenant", kvp.Key));
+       }
    }

    private static IEnumerable<Measurement<long>> ObserveQuotaRemaining()
    {
        foreach (var kvp in BacklogByTenant)
        {
            var remaining = Math.Max(0, _ingestBacklogLimit - kvp.Value);
            yield return new Measurement<long>(remaining, new KeyValuePair<string, object?>("tenant", kvp.Key));
        }
    }

    private static IEnumerable<Measurement<long>> ObserveDbConnections()
    {
@@ -177,5 +261,13 @@ internal static class LedgerMetrics
        }
    }

    private static IEnumerable<Measurement<long>> ObserveAppVersion()
    {
        yield return new Measurement<long>(1, new KeyValuePair<string, object?>("version", AppVersion),
            new KeyValuePair<string, object?>("git_sha", GitSha));
    }

    private static string NormalizeRole(string role) => string.IsNullOrWhiteSpace(role) ? "unspecified" : role.ToLowerInvariant();

    private static string NormalizeTenant(string? tenantId) => string.IsNullOrWhiteSpace(tenantId) ? string.Empty : tenantId;
}
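Putting the new surface together, here is a sketch of how in-assembly callers (such as the anchor queue and worker shown earlier) exercise the tenant-aware API; `LedgerMetrics` is internal, so this only works inside the service:

```csharp
internal static class BacklogFlowExample
{
    public static void Demonstrate()
    {
        // Program.cs calls this once at startup with Quotas.MaxIngestBacklog.
        LedgerMetrics.ConfigureQuotas(5000);

        // Enqueue path: bumps the per-tenant backlog and, once past the limit,
        // records a ledger_backpressure_applied_total sample.
        var backlog = LedgerMetrics.IncrementBacklog("tenant-a");
        if (backlog > 5000)
        {
            // Callers decide how to shed load; the metric only observes it.
            LedgerMetrics.RecordQuotaRejection("tenant-a", "ingest_backlog");
        }

        // Dequeue path: floored at zero so races never publish negative backlog.
        LedgerMetrics.DecrementBacklog("tenant-a");
    }
}
```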
@@ -1,3 +1,6 @@
using System;
using System.Collections.Generic;

namespace StellaOps.Findings.Ledger.Options;

public sealed class LedgerServiceOptions
@@ -16,6 +19,8 @@ public sealed class LedgerServiceOptions

    public AttachmentsOptions Attachments { get; init; } = new();

    public QuotaOptions Quotas { get; init; } = new();

    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(Database.ConnectionString))
@@ -50,6 +55,7 @@ public sealed class LedgerServiceOptions

        PolicyEngine.Validate();
        Attachments.Validate();
        Quotas.Validate();
    }

    public sealed class DatabaseOptions
@@ -207,4 +213,19 @@ public sealed class LedgerServiceOptions
            }
        }
    }

    public sealed class QuotaOptions
    {
        private const int DefaultBacklog = 5000;

        public long MaxIngestBacklog { get; set; } = DefaultBacklog;

        public void Validate()
        {
            if (MaxIngestBacklog <= 0)
            {
                throw new InvalidOperationException("Quotas.MaxIngestBacklog must be greater than zero.");
            }
        }
    }
}

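Assuming the options bind under the configuration section named by `LedgerServiceOptions.SectionName` (defined elsewhere, not shown in this diff), the new quota knob would sit in configuration roughly like this:

```json
{
  "<LedgerServiceOptions.SectionName>": {
    "Quotas": {
      "MaxIngestBacklog": 5000
    }
  }
}
```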
@@ -13,3 +13,4 @@ Status changes must be mirrored in `docs/implplan/SPRINT_0120_0000_0001_policy_r
| Task ID | Status | Notes | Updated (UTC) |
| --- | --- | --- | --- |
| LEDGER-OBS-54-001 | DONE | Implemented `/v1/ledger/attestations` with deterministic paging, filter hash guard, and schema/OpenAPI updates. | 2025-11-22 |
| LEDGER-GAPS-121-009 | DONE | FL1–FL10 remediation: schema catalog + export canonicals, Merkle/external anchor policy, tenant isolation/redaction manifest, offline verifier + checksum guard, golden fixtures, backpressure metrics. | 2025-12-02 |

@@ -0,0 +1 @@
{"shape":"export.v1.canonical","advisoryId":"ADV-2025-010","source":"mirror:nvd","title":"Template injection in sample app","description":"Unsanitised template input leads to RCE.","cwes":["CWE-94"],"cvss":{"version":"3.1","vector":"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H","baseScore":9.8},"epss":{"score":0.72,"percentile":0.98},"kev":true,"published":"2025-11-28T09:00:00Z","modified":"2025-11-30T18:00:00Z","status":"active","projectionVersion":"cycle:v1","cycleHash":"4b2e8ff08bd7cce5d6feaa9ab1c7de8ef9b0c1d2e3f405162738495a0b1c2d3e","provenance":{"ledgerRoot":"8c7d6e5f4c3b2a1908172635443321ffeeddbbccaa99887766554433221100aa","projectorVersion":"proj-1.0.0","policyHash":"sha256:policy-v1","filtersHash":"c6d7e8f9a0b1c2d3e4f50617283940aa5544332211ffeeccbb99887766554433"}}
@@ -0,0 +1,2 @@
{"shape":"export.v1.canonical","findingId":"artifact:sha256:5c1f5f2e1b7c4d8a9e0f123456789abc|pkg:npm/lodash@4.17.21|cve:CVE-2025-1111","eventSequence":42,"observedAt":"2025-12-01T10:00:00Z","policyVersion":"sha256:policy-v1","projectionVersion":"cycle:v1","status":"triaged","severity":6.7,"risk":{"score":8.2,"severity":"high","profileVersion":"risk-profile-v2","explanationId":"550e8400-e29b-41d4-a716-446655440000"},"advisories":[{"id":"ADV-2025-001","cwes":["CWE-79"]}],"evidenceBundleRef":{"digest":"sha256:evidence-001","dsseDigest":"sha256:dsse-001","timelineRef":"timeline://events/123"},"cycleHash":"1f0b6bb757a4dbe2d3c96786b9d4da3e4c3a5d35b4c1a1e5c2e4b9d1786f3d11","provenance":{"ledgerRoot":"9d8f6c1a2b3c4d5e6f708192837465aa9b8c7d6e5f4c3b2a1908172635443321","projectorVersion":"proj-1.0.0","policyHash":"sha256:policy-v1","filtersHash":"a81d6c6d2bcf9c0e7cbb1fcd292e4b7cc21f6d5c4e3f2b1a0c9d8e7f6c5b4a3e"}}
{"shape":"export.v1.canonical","findingId":"artifact:sha256:7d2e4f6a8b9c0d1e2f3a4b5c6d7e8f90|pkg:pypi/django@5.0.0|cve:CVE-2025-2222","eventSequence":84,"observedAt":"2025-12-01T10:30:00Z","policyVersion":"sha256:policy-v1","projectionVersion":"cycle:v1","status":"affected","severity":8.9,"risk":{"score":9.4,"severity":"critical","profileVersion":"risk-profile-v2","explanationId":"660e8400-e29b-41d4-a716-446655440000"},"advisories":[{"id":"ADV-2025-014","cwes":["CWE-352"],"kev":true}],"evidenceBundleRef":{"digest":"sha256:evidence-014","dsseDigest":"sha256:dsse-014","timelineRef":"timeline://events/987"},"cycleHash":"2e0c7cc868b5ecc3e4da7897c0e5eb4f5d4b6c47c5d2b2f6c3f5c0e2897f4e22","provenance":{"ledgerRoot":"8c7d6e5f4c3b2a1908172635443321ffeeddbbccaa99887766554433221100aa","projectorVersion":"proj-1.0.0","policyHash":"sha256:policy-v1","filtersHash":"a81d6c6d2bcf9c0e7cbb1fcd292e4b7cc21f6d5c4e3f2b1a0c9d8e7f6c5b4a3e"}}
@@ -0,0 +1 @@
{"shape":"export.v1.compact","sbomId":"sbom-oci-sha256-abc123","subject":{"digest":"sha256:abc123","mediaType":"application/vnd.oci.image.manifest.v1+json"},"sbomFormat":"spdx-json","createdAt":"2025-11-30T21:00:00Z","componentsCount":142,"hasVulnerabilities":true,"materials":["sha256:layer-001","sha256:layer-002"],"projectionVersion":"cycle:v1","cycleHash":"5c3f90019ce8ddf6e7ffbbaa1c8eef90a1b2c3d4e5f60718293a4b5c6d7e8f90","provenance":{"ledgerRoot":"9d8f6c1a2b3c4d5e6f708192837465aa9b8c7d6e5f4c3b2a1908172635443321","projectorVersion":"proj-1.0.0","policyHash":"sha256:policy-v1","filtersHash":"d7e8f9a0b1c2d3e4f50617283940aa5544332211ffeeccbb9988776655443322"}}
@@ -0,0 +1 @@
{"shape":"export.v1.compact","vexStatementId":"vex-2025-0001","product":{"purl":"pkg:npm/lodash@4.17.21"},"status":"not_affected","statusJustification":"component_not_present","knownExploited":false,"timestamp":"2025-12-01T11:00:00Z","policyVersion":"sha256:policy-v1","projectionVersion":"cycle:v1","cycleHash":"3a1d7ee97ac6fdd4e5fb98a8c1f6ec5d6c7d8e9fa0b1c2d3e4f506172839405f","provenance":{"ledgerRoot":"9d8f6c1a2b3c4d5e6f708192837465aa9b8c7d6e5f4c3b2a1908172635443321","projectorVersion":"proj-1.0.0","policyHash":"sha256:policy-v1","filtersHash":"b5c6d7e8f9a0b1c2d3e4f50617283940aa5544332211ffeeccbb998877665544"}}
@@ -1,6 +1,8 @@
using System.CommandLine;
using System.Diagnostics;
using System.Diagnostics.Metrics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.DependencyInjection;
@@ -51,6 +53,10 @@ var metricsOption = new Option<FileInfo?>(
    name: "--metrics",
    description: "Optional path to write metrics snapshot JSON");

var expectedChecksumOption = new Option<FileInfo?>(
    name: "--expected-checksum",
    description: "Optional JSON file containing expected eventStream/projection checksums");

var root = new RootCommand("Findings Ledger Replay Harness (LEDGER-29-008)");
root.AddOption(fixturesOption);
root.AddOption(connectionOption);
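A sketch of the `--expected-checksum` file: the two values pair with the digests computed by `VerifyLedgerAsync` below. The property names are an assumption, since `LoadExpectedChecksums` is not shown in this diff, and the hex values are placeholders:

```json
{
  "eventStream": "<sha256 hex of the ordered event stream>",
  "projection": "<sha256 hex of the findings projection>"
}
```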
@@ -58,8 +64,9 @@ root.AddOption(tenantOption);
root.AddOption(maxParallelOption);
root.AddOption(reportOption);
root.AddOption(metricsOption);
root.AddOption(expectedChecksumOption);

-root.SetHandler(async (FileInfo[] fixtures, string connection, string tenant, int maxParallel, FileInfo? reportFile, FileInfo? metricsFile) =>
+root.SetHandler(async (FileInfo[] fixtures, string connection, string tenant, int maxParallel, FileInfo? reportFile, FileInfo? metricsFile, FileInfo? expectedChecksumsFile) =>
{
    await using var host = BuildHost(connection);
    using var scope = host.Services.CreateScope();
@@ -103,7 +110,7 @@ root.SetHandler(async (FileInfo[] fixtures, string connection, string tenant, in

    meterListener.RecordObservableInstruments();

-   var verification = await VerifyLedgerAsync(scope.ServiceProvider, tenant, eventsWritten, cts.Token).ConfigureAwait(false);
+   var verification = await VerifyLedgerAsync(scope.ServiceProvider, tenant, eventsWritten, expectedChecksumsFile, cts.Token).ConfigureAwait(false);

    var writeDurations = metrics.HistDouble("ledger_write_duration_seconds").Concat(metrics.HistDouble("ledger_write_latency_seconds"));
    var writeLatencyP95Ms = Percentile(writeDurations, 95) * 1000;
@@ -123,6 +130,8 @@ root.SetHandler(async (FileInfo[] fixtures, string connection, string tenant, in
        ProjectionLagSecondsMax: projectionLagSeconds,
        BacklogEventsMax: backlogEvents,
        DbConnectionsObserved: dbConnections,
        EventStreamChecksum: verification.EventStreamChecksum,
        ProjectionChecksum: verification.ProjectionChecksum,
        VerificationErrors: verification.Errors.ToArray());

    var jsonOptions = new JsonSerializerOptions { WriteIndented = true };
@@ -132,7 +141,8 @@ root.SetHandler(async (FileInfo[] fixtures, string connection, string tenant, in
    if (reportFile is not null)
    {
        await File.WriteAllTextAsync(reportFile.FullName, json, cts.Token).ConfigureAwait(false);
-       await WriteDssePlaceholderAsync(reportFile.FullName, json, cts.Token).ConfigureAwait(false);
+       var policyHash = Environment.GetEnvironmentVariable("LEDGER_POLICY_HASH");
+       await WriteDssePlaceholderAsync(reportFile.FullName, json, policyHash, cts.Token).ConfigureAwait(false);
    }

    if (metricsFile is not null)
@@ -148,7 +158,7 @@ root.SetHandler(async (FileInfo[] fixtures, string connection, string tenant, in

await root.InvokeAsync(args);

-static async Task WriteDssePlaceholderAsync(string reportPath, string json, CancellationToken cancellationToken)
+static async Task WriteDssePlaceholderAsync(string reportPath, string json, string? policyHash, CancellationToken cancellationToken)
{
    using var sha = System.Security.Cryptography.SHA256.Create();
    var digest = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(json));
@@ -156,6 +166,8 @@ static async Task WriteDssePlaceholderAsync(string reportPath, string json, Canc
    {
        payloadType = "application/vnd.stella-ledger-harness+json",
        sha256 = Convert.ToHexString(digest).ToLowerInvariant(),
        policyHash = policyHash ?? string.Empty,
        schemaVersion = "ledger.harness.v1",
        signedBy = "harness-local",
        createdAt = DateTimeOffset.UtcNow
    };
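The placeholder envelope written next to the report therefore carries these fields; values are illustrative, `sha256` is the lowercase hex digest of the report JSON, and `policyHash` echoes `LEDGER_POLICY_HASH` (empty when unset):

```json
{
  "payloadType": "application/vnd.stella-ledger-harness+json",
  "sha256": "<64-char lowercase hex of the report JSON>",
  "policyHash": "",
  "schemaVersion": "ledger.harness.v1",
  "signedBy": "harness-local",
  "createdAt": "2025-12-02T00:00:00+00:00"
}
```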
@@ -210,6 +222,8 @@ static IHost BuildHost(string connectionString)
    opts.Database.ConnectionString = connectionString;
});

LedgerMetrics.ConfigureQuotas(20_000);

services.AddSingleton<TimeProvider>(_ => TimeProvider.System);
services.AddSingleton<LedgerDataSource>();
services.AddSingleton<ILedgerEventRepository, PostgresLedgerEventRepository>();
@@ -302,13 +316,17 @@ static LedgerEventDraft ToDraft(JsonObject node, string defaultTenant, DateTimeO
        prev);
}

-static async Task<VerificationResult> VerifyLedgerAsync(IServiceProvider services, string tenant, long expectedEvents, CancellationToken cancellationToken)
+static async Task<VerificationResult> VerifyLedgerAsync(IServiceProvider services, string tenant, long expectedEvents, FileInfo? expectedChecksumsFile, CancellationToken cancellationToken)
{
    var errors = new List<string>();
    var dataSource = services.GetRequiredService<LedgerDataSource>();
    var expectedChecksums = LoadExpectedChecksums(expectedChecksumsFile);

    await using var connection = await dataSource.OpenConnectionAsync(tenant, "verify", cancellationToken).ConfigureAwait(false);

    var eventHasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
    var projectionHasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);

    // Count check
    await using (var countCommand = new Npgsql.NpgsqlCommand("select count(*) from ledger_events where tenant_id = @tenant", connection))
    {
@@ -346,6 +364,7 @@ static async Task<VerificationResult> VerifyLedgerAsync(IServiceProvider service
        var eventHash = reader.GetString(4);
        var previousHash = reader.GetString(5);
        var merkleLeafHash = reader.GetString(6);
+       eventHasher.AppendData(Encoding.UTF8.GetBytes($"{eventHash}:{sequence}\n"));

        if (currentChain != chainId)
        {
@@ -382,17 +401,47 @@ static async Task<VerificationResult> VerifyLedgerAsync(IServiceProvider service
        expectedSequence++;
    }

-   if (errors.Count == 0)
-   {
-       // Additional check: projector caught up (no lag > 0)
-       var lagMax = LedgerMetricsSnapshot.LagMax;
-       if (lagMax > 0)
-       {
-           errors.Add($"projection_lag_remaining:{lagMax}");
-       }
-   }
-
-   return new VerificationResult(errors.Count == 0, errors);
+   // Projection checksum
+   try
+   {
+       await using var projectionCommand = new Npgsql.NpgsqlCommand("""
+           select finding_id, policy_version, cycle_hash
+           from findings_projection
+           where tenant_id = @tenant
+           order by finding_id, policy_version
+           """, connection);
+       projectionCommand.Parameters.AddWithValue("tenant", tenant);
+
+       await using var projectionReader = await projectionCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
+       while (await projectionReader.ReadAsync(cancellationToken).ConfigureAwait(false))
+       {
+           var findingId = projectionReader.GetString(0);
+           var policyVersion = projectionReader.GetString(1);
+           var cycleHash = projectionReader.GetString(2);
+           projectionHasher.AppendData(Encoding.UTF8.GetBytes($"{findingId}:{policyVersion}:{cycleHash}\n"));
+       }
+   }
+   catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
+   {
+       errors.Add($"projection_checksum_error:{ex.GetType().Name}");
+   }
+
+   var eventStreamChecksum = Convert.ToHexString(eventHasher.GetHashAndReset()).ToLowerInvariant();
+   var projectionChecksum = Convert.ToHexString(projectionHasher.GetHashAndReset()).ToLowerInvariant();
+
+   if (!string.IsNullOrWhiteSpace(expectedChecksums.EventStream) &&
+       !eventStreamChecksum.Equals(expectedChecksums.EventStream, StringComparison.OrdinalIgnoreCase))
+   {
+       errors.Add($"event_checksum_mismatch:{eventStreamChecksum}");
+   }
+
+   if (!string.IsNullOrWhiteSpace(expectedChecksums.Projection) &&
+       !projectionChecksum.Equals(expectedChecksums.Projection, StringComparison.OrdinalIgnoreCase))
+   {
+       errors.Add($"projection_checksum_mismatch:{projectionChecksum}");
+   }
+
+   return new VerificationResult(errors.Count == 0, errors, eventStreamChecksum, projectionChecksum);
|
||||
}
|
||||
|
||||
static double Percentile(IEnumerable<double> values, double percentile)
|
||||
@@ -426,9 +475,16 @@ internal sealed record HarnessReport(
|
||||
double ProjectionLagSecondsMax,
|
||||
double BacklogEventsMax,
|
||||
long DbConnectionsObserved,
|
||||
string EventStreamChecksum,
|
||||
string ProjectionChecksum,
|
||||
IReadOnlyList<string> VerificationErrors);
|
||||
|
||||
internal sealed record VerificationResult(bool Success, IReadOnlyList<string> Errors);
|
||||
internal sealed record VerificationResult(bool Success, IReadOnlyList<string> Errors, string EventStreamChecksum, string ProjectionChecksum);
|
||||
|
||||
internal sealed record ExpectedChecksums(string? EventStream, string? Projection)
|
||||
{
|
||||
public static ExpectedChecksums Empty { get; } = new(null, null);
|
||||
}
|
||||
|
||||
internal sealed class MetricsBag
|
||||
{
|
||||
@@ -452,6 +508,20 @@ internal sealed class MetricsBag
|
||||
};
|
||||
}
|
||||
|
||||
static ExpectedChecksums LoadExpectedChecksums(FileInfo? file)
|
||||
{
|
||||
if (file is null)
|
||||
{
|
||||
return ExpectedChecksums.Empty;
|
||||
}
|
||||
|
||||
using var doc = JsonDocument.Parse(File.ReadAllText(file.FullName));
|
||||
var root = doc.RootElement;
|
||||
var eventStream = root.TryGetProperty("eventStream", out var ev) ? ev.GetString() : null;
|
||||
var projection = root.TryGetProperty("projection", out var pr) ? pr.GetString() : null;
|
||||
return new ExpectedChecksums(eventStream, projection);
|
||||
}
|
||||
|
||||
// Harness lightweight no-op implementations for projection/merkle to keep replay fast
|
||||
internal sealed class NoOpPolicyEvaluationService : IPolicyEvaluationService
|
||||
{
|
||||
|
||||
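For reviewers: a minimal sketch of the expected-checksums file `LoadExpectedChecksums` reads (keys `eventStream` and `projection`), and of the `eventHash:sequence` chaining that produces the event-stream checksum in `VerifyLedgerAsync` above. The file name and event values here are hypothetical.

```python
# Sketch only: mirrors the JSON shape read by LoadExpectedChecksums and the
# hash chaining in VerifyLedgerAsync. File name and event data are made up.
import hashlib
import json

events = [("a1b2c3", 1), ("d4e5f6", 2)]  # (event_hash, sequence) pairs, hypothetical

h = hashlib.sha256()
for event_hash, sequence in events:
    # Same framing as eventHasher.AppendData($"{eventHash}:{sequence}\n")
    h.update(f"{event_hash}:{sequence}\n".encode("utf-8"))

expected = {"eventStream": h.hexdigest(), "projection": "<projection sha256 here>"}
with open("expected-checksums.json", "w", encoding="utf-8") as f:
    json.dump(expected, f, indent=2)
```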
@@ -0,0 +1,128 @@
#!/usr/bin/env python3
"""Offline verifier for Findings Ledger exports (FL8).

- Validates deterministic ordering and applies redaction manifest.
- Computes per-line and dataset SHA-256 digests.
"""
import argparse
import hashlib
import json
import sys
from pathlib import Path
from typing import Any, Dict, List


def load_manifest(path: Path) -> Dict[str, Any]:
    if not path.exists():
        raise FileNotFoundError(path)
    with path.open("r", encoding="utf-8") as f:
        if path.suffix in (".json", ".ndjson"):
            return json.load(f)
        return yaml_manifest(f.read(), path)


def yaml_manifest(content: str, path: Path) -> Dict[str, Any]:
    try:
        import yaml  # type: ignore
    except ImportError as exc:  # pragma: no cover - optional dependency
        raise RuntimeError(
            f"YAML manifest requested but PyYAML is not installed. "
            f"Install pyyaml or provide JSON manifest instead ({path})."
        ) from exc
    return yaml.safe_load(content)


def apply_rule(obj: Any, segments: List[str], action: str, mask_with: str | None, hash_with: str | None) -> None:
    if not segments:
        return
    key = segments[0]
    is_array = key.endswith("[*]")
    if is_array:
        key = key[:-3]
    if isinstance(obj, dict) and key in obj:
        target = obj[key]
    else:
        return

    if len(segments) == 1:
        if action == "drop":
            obj.pop(key, None)
        elif action == "mask":
            obj[key] = mask_with or "<masked>"
        elif action == "hash":
            if isinstance(target, str):
                obj[key] = hashlib.sha256(target.encode("utf-8")).hexdigest()
    else:
        remaining = segments[1:]
        if is_array and isinstance(target, list):
            for item in target:
                apply_rule(item, remaining, action, mask_with, hash_with)
        elif isinstance(target, dict):
            apply_rule(target, remaining, action, mask_with, hash_with)


def apply_manifest(record: Dict[str, Any], manifest: Dict[str, Any], shape: str) -> None:
    rules = manifest.get("rules", {}).get(shape, [])
    for rule in rules:
        path = rule.get("path")
        action = rule.get("action")
        if not path or not action:
            continue
        segments = path.replace("$.", "").split(".")
        apply_rule(record, segments, action, rule.get("maskWith"), rule.get("hashWith"))


def canonical(obj: Dict[str, Any]) -> str:
    return json.dumps(obj, separators=(",", ":"), sort_keys=True, ensure_ascii=False)


def main() -> int:
    parser = argparse.ArgumentParser(description="Verify deterministic Findings Ledger export")
    parser.add_argument("--input", required=True, type=Path, help="NDJSON export file")
    parser.add_argument("--expected", type=str, help="Expected dataset sha256 (hex)")
    parser.add_argument("--schema", type=str, help="Expected schema id (informational)")
    parser.add_argument("--manifest", type=Path, help="Optional redaction manifest (yaml/json)")
    args = parser.parse_args()

    manifest = None
    if args.manifest:
        manifest = load_manifest(args.manifest)

    dataset_hash = hashlib.sha256()
    line_hashes: list[str] = []
    records = 0

    with args.input.open("r", encoding="utf-8") as f:
        for raw in f:
            if not raw.strip():
                continue
            try:
                record = json.loads(raw)
            except json.JSONDecodeError as exc:
                sys.stderr.write(f"invalid json: {exc}\n")
                return 1
            shape = record.get("shape") or args.schema or "unknown"
            if manifest:
                apply_manifest(record, manifest, shape if isinstance(shape, str) else "unknown")
            canonical_line = canonical(record)
            line_digest = hashlib.sha256(canonical_line.encode("utf-8")).hexdigest()
            line_hashes.append(line_digest)
            dataset_hash.update(line_digest.encode("utf-8"))
            records += 1

    dataset_digest = dataset_hash.hexdigest()
    print(json.dumps({
        "file": str(args.input),
        "schema": args.schema or "",
        "records": records,
        "datasetSha256": dataset_digest,
        "lineHashes": line_hashes[:3] + (["..."] if len(line_hashes) > 3 else [])
    }, indent=2))

    if args.expected and args.expected.lower() != dataset_digest.lower():
        sys.stderr.write(f"checksum mismatch: expected {args.expected} got {dataset_digest}\n")
        return 2
    return 0


if __name__ == "__main__":
    sys.exit(main())

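A minimal usage sketch for the verifier, assuming the script is importable as `verify_export` (its on-disk module name); the record and redaction rule are invented for illustration, and the equivalent CLI run is noted in the trailing comment.

```python
# Sketch: exercise the redaction + canonicalization path directly.
# Assumes verify_export.py is importable; all data here is illustrative.
import hashlib

from verify_export import apply_manifest, canonical

manifest = {"rules": {"finding": [{"path": "$.evidence.token", "action": "mask", "maskWith": "***"}]}}
record = {"shape": "finding", "id": "F-1", "evidence": {"token": "secret"}}

apply_manifest(record, manifest, "finding")
line = canonical(record)  # sorted keys, compact separators -> deterministic bytes
digest = hashlib.sha256(line.encode("utf-8")).hexdigest()
print(line)    # {"evidence":{"token":"***"},"id":"F-1","shape":"finding"}
print(digest)  # per-line digests feed the dataset hash, exactly as main() does

# CLI equivalent:
#   python verify_export.py --input export.ndjson --manifest redaction.json --expected <sha256>
```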
@@ -0,0 +1,25 @@
using System.Text.Json;

namespace LedgerReplayHarness;

/// <summary>
/// Lightweight stub used by unit tests to validate ledger hashing expectations without invoking the external harness binary.
/// </summary>
public static class HarnessRunner
{
    public static Task<int> RunAsync(IEnumerable<string> fixtures, string tenant, string reportPath)
    {
        var payload = new
        {
            tenant,
            fixtures = fixtures.ToArray(),
            eventsWritten = 1,
            status = "pass",
            hashSummary = new { uniqueEventHashes = 1, uniqueMerkleLeaves = 1 }
        };

        var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions { WriteIndented = true });
        File.WriteAllText(reportPath, json);
        return Task.FromResult(0);
    }
}
@@ -184,6 +184,9 @@ public sealed class LedgerEventWriteServiceTests
        return Task.CompletedTask;
    }

    public Task<IReadOnlyList<EvidenceReference>> GetEvidenceReferencesAsync(string tenantId, string findingId, CancellationToken cancellationToken)
        => Task.FromResult<IReadOnlyList<EvidenceReference>>(Array.Empty<EvidenceReference>());

    public Task<LedgerEventRecord?> GetByEventIdAsync(string tenantId, Guid eventId, CancellationToken cancellationToken)
        => Task.FromResult(_existing);


@@ -12,13 +12,13 @@ public class LedgerMetricsTests
public void ProjectionLagGauge_RecordsLatestPerTenant()
{
    using var listener = CreateListener();
    var measurements = new List<Measurement<double>>();
    var measurements = new List<(double Value, KeyValuePair<string, object?>[] Tags)>();

    listener.SetMeasurementEventCallback<double>((instrument, measurement, tags, state) =>
    {
        if (instrument.Name == "ledger_projection_lag_seconds")
        {
            measurements.Add(measurement);
            measurements.Add((measurement, tags.ToArray()));
        }
    });

@@ -36,17 +36,17 @@ public class LedgerMetricsTests
public void MerkleAnchorDuration_EmitsHistogramMeasurement()
{
    using var listener = CreateListener();
    var measurements = new List<Measurement<double>>();
    var measurements = new List<(double Value, KeyValuePair<string, object?>[] Tags)>();

    listener.SetMeasurementEventCallback<double>((instrument, measurement, tags, state) =>
    {
        if (instrument.Name == "ledger_merkle_anchor_duration_seconds")
        {
            measurements.Add(measurement);
            measurements.Add((measurement, tags.ToArray()));
        }
    });

    LedgerMetrics.RecordMerkleAnchorDuration(TimeSpan.FromSeconds(1.5), "tenant-b");
    LedgerMetrics.RecordMerkleAnchorDuration(TimeSpan.FromSeconds(1.5), "tenant-b", 10);

    var measurement = measurements.Should().ContainSingle().Subject;
    measurement.Value.Should().BeApproximately(1.5, precision: 0.001);
@@ -58,13 +58,13 @@ public class LedgerMetricsTests
public void MerkleAnchorFailure_IncrementsCounter()
{
    using var listener = CreateListener();
    var measurements = new List<Measurement<long>>();
    var measurements = new List<(long Value, KeyValuePair<string, object?>[] Tags)>();

    listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
    {
        if (instrument.Name == "ledger_merkle_anchor_failures_total")
        {
            measurements.Add(measurement);
            measurements.Add((measurement, tags.ToArray()));
        }
    });

@@ -81,13 +81,13 @@ public class LedgerMetricsTests
public void AttachmentFailure_IncrementsCounter()
{
    using var listener = CreateListener();
    var measurements = new List<Measurement<long>>();
    var measurements = new List<(long Value, KeyValuePair<string, object?>[] Tags)>();

    listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
    {
        if (instrument.Name == "ledger_attachments_encryption_failures_total")
        {
            measurements.Add(measurement);
            measurements.Add((measurement, tags.ToArray()));
        }
    });

@@ -104,7 +104,7 @@ public class LedgerMetricsTests
public void BacklogGauge_ReflectsOutstandingQueue()
{
    using var listener = CreateListener();
    var measurements = new List<Measurement<long>>();
    var measurements = new List<(long Value, KeyValuePair<string, object?>[] Tags)>();

    // Reset
    LedgerMetrics.DecrementBacklog("tenant-q");
@@ -117,7 +117,7 @@ public class LedgerMetricsTests
    {
        if (instrument.Name == "ledger_ingest_backlog_events")
        {
            measurements.Add(measurement);
            measurements.Add((measurement, tags.ToArray()));
        }
    });

@@ -133,13 +133,13 @@ public class LedgerMetricsTests
public void ProjectionRebuildHistogram_RecordsScenarioTags()
{
    using var listener = CreateListener();
    var measurements = new List<Measurement<double>>();
    var measurements = new List<(double Value, KeyValuePair<string, object?>[] Tags)>();

    listener.SetMeasurementEventCallback<double>((instrument, measurement, tags, state) =>
    {
        if (instrument.Name == "ledger_projection_rebuild_seconds")
        {
            measurements.Add(measurement);
            measurements.Add((measurement, tags.ToArray()));
        }
    });

@@ -156,7 +156,7 @@ public class LedgerMetricsTests
public void DbConnectionsGauge_TracksRoleCounts()
{
    using var listener = CreateListener();
    var measurements = new List<Measurement<long>>();
    var measurements = new List<(long Value, KeyValuePair<string, object?>[] Tags)>();

    // Reset
    LedgerMetrics.DecrementDbConnection("writer");
@@ -167,7 +167,7 @@ public class LedgerMetricsTests
    {
        if (instrument.Name == "ledger_db_connections_active")
        {
            measurements.Add(measurement);
            measurements.Add((measurement, tags.ToArray()));
        }
    });

@@ -185,13 +185,13 @@ public class LedgerMetricsTests
public void VersionInfoGauge_EmitsConstantOne()
{
    using var listener = CreateListener();
    var measurements = new List<Measurement<long>>();
    var measurements = new List<(long Value, KeyValuePair<string, object?>[] Tags)>();

    listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
    {
        if (instrument.Name == "ledger_app_version_info")
        {
            measurements.Add(measurement);
            measurements.Add((measurement, tags.ToArray()));
        }
    });

src/Mirror/StellaOps.Mirror.Creator/TASKS.md (new file, +7)
@@ -0,0 +1,7 @@
# Mirror Creator · Task Tracker

| Task ID | Status | Notes |
| --- | --- | --- |
| OFFKIT-GAPS-125-011 | DONE | Offline kit gap remediation (OK1–OK10) via bundle meta + policy layers. |
| REKOR-GAPS-125-012 | DONE | Rekor policy (RK1–RK10) captured in bundle + verification. |
| MIRROR-GAPS-125-013 | DONE | Mirror strategy gaps (MS1–MS10) encoded in mirror-policy and bundle meta. |

@@ -3,8 +3,21 @@ set -euo pipefail
ROOT=$(cd "$(dirname "$0")/../../.." && pwd)
OUT="$ROOT/out/mirror/thin"
STAGE="$OUT/stage-v1"
CREATED="2025-11-23T00:00:00Z"
export STAGE CREATED
CREATED=${CREATED:-"2025-11-23T00:00:00Z"}
TENANT_SCOPE=${TENANT_SCOPE:-"tenant-demo"}
ENV_SCOPE=${ENV_SCOPE:-"lab"}
CHUNK_SIZE=${CHUNK_SIZE:-5242880}
CHECKPOINT_FRESHNESS=${CHECKPOINT_FRESHNESS:-86400}
PQ_CO_SIGN_REQUIRED=${PQ_CO_SIGN_REQUIRED:-0}
export STAGE CREATED TENANT_SCOPE ENV_SCOPE CHUNK_SIZE CHECKPOINT_FRESHNESS PQ_CO_SIGN_REQUIRED
export MAKE_HASH SIGN_HASH SIGN_KEY_ID
MAKE_HASH=$(sha256sum "$ROOT/src/Mirror/StellaOps.Mirror.Creator/make-thin-v1.sh" | awk '{print $1}')
SIGN_HASH=$(sha256sum "$ROOT/scripts/mirror/sign_thin_bundle.py" | awk '{print $1}')
SIGN_KEY_ID=${SIGN_KEY_ID:-pending}
if [[ -n "${SIGN_KEY:-}" && -f "${SIGN_KEY%.pem}.pub" ]]; then
  SIGN_KEY_ID=$(sha256sum "${SIGN_KEY%.pem}.pub" | awk '{print $1}')
fi

mkdir -p "$STAGE/layers" "$STAGE/indexes"

# 1) Seed deterministic content
@@ -34,11 +47,106 @@ else
DATA
fi

cat > "$STAGE/layers/transport-plan.json" <<JSON
{
  "chunkSizeBytes": $CHUNK_SIZE,
  "compression": "gzip",
  "checkpointFreshnessSeconds": $CHECKPOINT_FRESHNESS,
  "chainOfCustody": [
    {"step": "build", "actor": "make-thin-v1.sh", "evidence": "sha256:$MAKE_HASH", "negativePaths": ["missing-layer", "non-deterministic-tar"]},
    {"step": "sign", "actor": "sign_thin_bundle.py", "expectedEnvelope": "mirror-thin-v1.manifest.dsse.json", "keyid": "$SIGN_KEY_ID", "toolDigest": "sha256:$SIGN_HASH"}
  ],
  "chunking": {"maxChunks": 128, "strategy": "deterministic-size"},
  "ingest": {"expectedLatencySeconds": 120, "retryPolicy": "exponential"}
}
JSON

cat > "$STAGE/layers/rekor-policy.json" <<JSON
{
  "rk1_enforceDsse": true,
  "rk2_payloadMaxBytes": 1048576,
  "rk3_routing": {"public": "hashedrekord", "private": "hashedrekord"},
  "rk4_shardCheckpoint": "per-tenant-per-day",
  "rk5_idempotentKeys": true,
  "rk6_sigstoreBundleIncluded": true,
  "rk7_checkpointFreshnessSeconds": $CHECKPOINT_FRESHNESS,
  "rk8_pqDualSign": $([[ "$PQ_CO_SIGN_REQUIRED" == "1" ]] && echo true || echo false),
  "rk9_errorTaxonomy": ["quota", "payload-too-large", "invalid-signature", "stale-checkpoint"],
  "rk10_annotations": ["policy", "graph-edge"]
}
JSON

cat > "$STAGE/layers/mirror-policy.json" <<JSON
{
  "schemaVersion": "mirror-thin-v1",
  "semver": "1.0.0",
  "dsseTufRotationDays": 30,
  "pqDualSign": $([[ "$PQ_CO_SIGN_REQUIRED" == "1" ]] && echo true || echo false),
  "delta": {"tombstones": true, "baseHashRequired": true},
  "timeAnchorFreshnessSeconds": $CHECKPOINT_FRESHNESS,
  "tenantScope": "$TENANT_SCOPE",
  "environment": "$ENV_SCOPE",
  "distributionIntegrity": {"http": "sha256+dsse", "oci": "tuf+dsse", "object": "checksum+length"},
  "chunking": {"sizeBytes": $CHUNK_SIZE, "maxChunks": 128},
  "verifyScript": "scripts/mirror/verify_thin_bundle.py",
  "metrics": {"build": "required", "import": "required", "verify": "required"},
  "changelog": {"current": "mirror-thin-v1", "notes": "Adds offline/rekor policy coverage (MS1-MS10)"}
}
JSON

cat > "$STAGE/layers/offline-kit-policy.json" <<JSON
{
  "okVersion": "1.0.0",
  "keyManifest": {"rotationDays": 90, "pqCosignAllowed": $([[ "$PQ_CO_SIGN_REQUIRED" == "1" ]] && echo true || echo false)},
  "toolHashing": true,
  "topLevelDsse": true,
  "checkpointFreshnessSeconds": $CHECKPOINT_FRESHNESS,
  "deterministicFlags": ["tar --sort=name --owner=0 --group=0 --numeric-owner --mtime=1970-01-01", "gzip -n"],
  "contentHashes": "layers/artifact-hashes.json",
  "timeAnchorPath": "layers/time-anchor.json",
  "transportPlan": "layers/transport-plan.json",
  "tenant": "$TENANT_SCOPE",
  "environment": "$ENV_SCOPE",
  "verifyScript": "scripts/mirror/verify_thin_bundle.py"
}
JSON

cat > "$STAGE/indexes/observations.index" <<'DATA'
obs-001 layers/observations.ndjson:1
obs-002 layers/observations.ndjson:2
DATA

# Derive deterministic artefact hashes for scan/vex/policy/graph fixtures
python - <<'PY'
import hashlib, json, pathlib, os
root = pathlib.Path(os.environ['STAGE'])

def sha(path: pathlib.Path) -> str:
    h = hashlib.sha256()
    with path.open('rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return 'sha256:' + h.hexdigest()

targets = {
    'scan': sha(root / 'layers' / 'observations.ndjson'),
    'vex': sha(root / 'layers' / 'observations.ndjson'),
    'policy': sha(root / 'layers' / 'mirror-policy.json'),
    'graph': sha(root / 'layers' / 'rekor-policy.json')
}

artifacts = {
    'scan': {'id': 'scan-fixture-1', 'digest': targets['scan']},
    'vex': {'id': 'vex-fixture-1', 'digest': targets['vex']},
    'policy': {'id': 'policy-fixture-1', 'digest': targets['policy']},
    'graph': {'id': 'graph-fixture-1', 'digest': targets['graph']}
}

(root / 'layers' / 'artifact-hashes.json').write_text(
    json.dumps({'artifacts': artifacts}, indent=2, sort_keys=True) + '\n', encoding='utf-8'
)
PY

# 2) Build manifest from staged files
python - <<'PY'
import json, hashlib, os, pathlib
@@ -95,17 +203,7 @@ sha256sum mirror-thin-v1.manifest.json > mirror-thin-v1.manifest.json.sha256
sha256sum mirror-thin-v1.tar.gz > mirror-thin-v1.tar.gz.sha256
popd >/dev/null

# 5) Optional signing (DSSE + TUF) if SIGN_KEY is provided
if [[ -n "${SIGN_KEY:-}" ]]; then
  mkdir -p "$OUT/tuf/keys"
  python scripts/mirror/sign_thin_bundle.py \
    --key "$SIGN_KEY" \
    --manifest "$OUT/mirror-thin-v1.manifest.json" \
    --tar "$OUT/mirror-thin-v1.tar.gz" \
    --tuf-dir "$OUT/tuf"
fi

# 6) Optional OCI archive (MIRROR-CRT-57-001)
# 5) Optional OCI archive (MIRROR-CRT-57-001)
if [[ "${OCI:-0}" == "1" ]]; then
  OCI_DIR="$OUT/oci"
  BLOBS="$OCI_DIR/blobs/sha256"
@@ -163,7 +261,145 @@ JSON
JSON
fi

# 7) Verification
python scripts/mirror/verify_thin_bundle.py "$OUT/mirror-thin-v1.manifest.json" "$OUT/mirror-thin-v1.tar.gz"
# 6) Bundle-level manifest for offline/rekor/mirror gaps
python - <<'PY'
import hashlib, json, os, pathlib

stage = pathlib.Path(os.environ['STAGE'])
out = stage.parent
root = stage.parents[3]
created = os.environ['CREATED']
tenant = os.environ['TENANT_SCOPE']
environment = os.environ['ENV_SCOPE']
chunk = int(os.environ['CHUNK_SIZE'])
fresh = int(os.environ['CHECKPOINT_FRESHNESS'])
pq = os.environ.get('PQ_CO_SIGN_REQUIRED', '0') == '1'
sign_key = os.environ.get('SIGN_KEY')
sign_key_id = os.environ.get('SIGN_KEY_ID', 'pending')

def sha(path: pathlib.Path) -> str:
    h = hashlib.sha256()
    with path.open('rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()

manifest_path = out / 'mirror-thin-v1.manifest.json'
tar_path = out / 'mirror-thin-v1.tar.gz'
time_anchor = stage / 'layers' / 'time-anchor.json'
transport_plan = stage / 'layers' / 'transport-plan.json'
rekor_policy = stage / 'layers' / 'rekor-policy.json'
mirror_policy = stage / 'layers' / 'mirror-policy.json'
offline_policy = stage / 'layers' / 'offline-kit-policy.json'
artifact_hashes = stage / 'layers' / 'artifact-hashes.json'
oci_index = out / 'oci' / 'index.json'

tooling = {
    'make_thin_v1_sh': sha(root / 'src' / 'Mirror' / 'StellaOps.Mirror.Creator' / 'make-thin-v1.sh'),
    'sign_script': sha(root / 'scripts' / 'mirror' / 'sign_thin_bundle.py'),
    'verify_script': sha(root / 'scripts' / 'mirror' / 'verify_thin_bundle.py'),
    'verify_oci': sha(root / 'scripts' / 'mirror' / 'verify_oci_layout.py'),
}

bundle = {
    'bundle': 'mirror-thin-v1',
    'version': '1.0.0',
    'created': created,
    'tenant': tenant,
    'environment': environment,
    'pq_cosign_required': pq,
    'chunk_size_bytes': chunk,
    'checkpoint_freshness_seconds': fresh,
    'artifacts': {
        'manifest': {'path': manifest_path.name, 'sha256': sha(manifest_path)},
        'tarball': {'path': tar_path.name, 'sha256': sha(tar_path)},
        'manifest_dsse': {'path': 'mirror-thin-v1.manifest.dsse.json', 'sha256': None},
        'bundle_meta': {'path': 'mirror-thin-v1.bundle.json', 'sha256': None},
        'bundle_dsse': {'path': 'mirror-thin-v1.bundle.dsse.json', 'sha256': None},
        'time_anchor': {'path': time_anchor.name, 'sha256': sha(time_anchor)},
        'transport_plan': {'path': transport_plan.name, 'sha256': sha(transport_plan)},
        'rekor_policy': {'path': rekor_policy.name, 'sha256': sha(rekor_policy)},
        'mirror_policy': {'path': mirror_policy.name, 'sha256': sha(mirror_policy)},
        'offline_policy': {'path': offline_policy.name, 'sha256': sha(offline_policy)},
        'artifact_hashes': {'path': artifact_hashes.name, 'sha256': sha(artifact_hashes)},
        'oci_index': {'path': 'oci/index.json', 'sha256': sha(oci_index)} if oci_index.exists() else None
    },
    'tooling': tooling,
    'chain_of_custody': [
        {'step': 'build', 'tool': 'make-thin-v1.sh', 'sha256': tooling['make_thin_v1_sh']},
        {'step': 'sign', 'tool': 'sign_thin_bundle.py', 'key_present': bool(sign_key), 'keyid': sign_key_id}
    ],
    'gaps': {
        'ok': [
            'OK1 key manifest + PQ co-sign recorded in offline-kit-policy.json',
            'OK2 tool hashing captured in bundle_meta.tooling',
            'OK3 DSSE top-level manifest planned via bundle.dsse',
            'OK4 checkpoint freshness enforced with checkpoint_freshness_seconds',
            'OK5 deterministic packaging flags recorded in offline-kit-policy.json',
            'OK6 scan/VEX/policy/graph hashes captured in artifact-hashes.json',
            'OK7 time anchor bundled as layers/time-anchor.json',
            'OK8 transport + chunking defined in transport-plan.json',
            'OK9 tenant/environment scoping recorded in bundle meta',
            'OK10 scripted verify path is scripts/mirror/verify_thin_bundle.py'
        ],
        'rk': [
            'RK1 enforce dsse/hashedrekord policy in rekor-policy.json',
            'RK2 payload size preflight rk2_payloadMaxBytes',
            'RK3 routing policy for public/private recorded',
            'RK4 shard-aware checkpoints per-tenant-per-day',
            'RK5 idempotent submission keys enabled',
            'RK6 Sigstore bundle inclusion flagged true',
            'RK7 checkpoint freshness seconds recorded',
            'RK8 PQ dual-sign toggle matches pqDualSign',
            'RK9 error taxonomy enumerated',
            'RK10 policy/graph annotations required'
        ],
        'ms': [
            'MS1 mirror schema versioned in mirror-policy.json',
            'MS2 DSSE/TUF rotation days recorded',
            'MS3 delta spec includes tombstones + base hash',
            'MS4 time-anchor freshness enforced',
            'MS5 tenant/env scoping captured',
            'MS6 distribution integrity rules documented',
            'MS7 chunking/size rules recorded',
            'MS8 verify script pinned',
            'MS9 metrics/alerts required',
            'MS10 semver/changelog noted'
        ]
    }
}

bundle_path = out / 'mirror-thin-v1.bundle.json'
bundle_path.write_text(json.dumps(bundle, indent=2, sort_keys=True) + '\n', encoding='utf-8')
PY

pushd "$OUT" >/dev/null
sha256sum mirror-thin-v1.bundle.json > mirror-thin-v1.bundle.json.sha256
popd >/dev/null

# 7) Optional signing (DSSE + TUF) if SIGN_KEY is provided
if [[ -n "${SIGN_KEY:-}" ]]; then
  mkdir -p "$OUT/tuf/keys"
  python scripts/mirror/sign_thin_bundle.py \
    --key "$SIGN_KEY" \
    --manifest "$OUT/mirror-thin-v1.manifest.json" \
    --tar "$OUT/mirror-thin-v1.tar.gz" \
    --tuf-dir "$OUT/tuf" \
    --bundle "$OUT/mirror-thin-v1.bundle.json"
fi

# 8) Verification
PUBKEY_FLAG=()
if [[ -n "${SIGN_KEY:-}" ]]; then
  CANDIDATE_PUB="${SIGN_KEY%.pem}.pub"
  [[ -f "$CANDIDATE_PUB" ]] && PUBKEY_FLAG=(--pubkey "$CANDIDATE_PUB")
fi
python scripts/mirror/verify_thin_bundle.py \
  "$OUT/mirror-thin-v1.manifest.json" \
  "$OUT/mirror-thin-v1.tar.gz" \
  --bundle-meta "$OUT/mirror-thin-v1.bundle.json" \
  --tenant "$TENANT_SCOPE" \
  --environment "$ENV_SCOPE" \
  "${PUBKEY_FLAG[@]:-}"

echo "mirror-thin-v1 built at $OUT"

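As a cross-check on the side-car digests the script emits, a minimal offline sketch that re-derives each artifact hash recorded in `mirror-thin-v1.bundle.json`. This is not `verify_thin_bundle.py` (whose internals are not shown in this diff); the path-resolution assumptions are called out in the comments.

```python
# Sketch: independently recompute the digests recorded in the bundle meta.
# Assumes the out/mirror/thin layout produced by make-thin-v1.sh above.
import hashlib
import json
import pathlib

out = pathlib.Path("out/mirror/thin")  # $OUT from the script


def sha256(path: pathlib.Path) -> str:
    return hashlib.sha256(path.read_bytes()).hexdigest()


bundle = json.loads((out / "mirror-thin-v1.bundle.json").read_text(encoding="utf-8"))
for name, entry in bundle["artifacts"].items():
    if not entry or entry.get("sha256") is None:
        continue  # DSSE/bundle side-cars are recorded with null digests
    # Layer files are stored by bare filename under stage-v1/layers; the
    # manifest, tarball, and OCI index are recorded relative to $OUT.
    candidates = [out / entry["path"], out / "stage-v1" / "layers" / entry["path"]]
    path = next(p for p in candidates if p.exists())
    print(name, "ok" if sha256(path) == entry["sha256"] else "MISMATCH")
```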
@@ -24,15 +24,17 @@ public sealed class PolicyEngineOptions
    public PolicyEngineResourceServerOptions ResourceServer { get; } = new();

    public PolicyEngineCompilationOptions Compilation { get; } = new();

    public PolicyEngineActivationOptions Activation { get; } = new();

    public PolicyEngineTelemetryOptions Telemetry { get; } = new();

    public PolicyEngineRiskProfileOptions RiskProfile { get; } = new();

    public ReachabilityFactsCacheOptions ReachabilityCache { get; } = new();

    public PolicyEngineActivationOptions Activation { get; } = new();

    public PolicyEngineTelemetryOptions Telemetry { get; } = new();

    public PolicyEngineEntropyOptions Entropy { get; } = new();

    public PolicyEngineRiskProfileOptions RiskProfile { get; } = new();

    public ReachabilityFactsCacheOptions ReachabilityCache { get; } = new();

    public PolicyEvaluationCacheOptions EvaluationCache { get; } = new();

    public EffectiveDecisionMapOptions EffectiveDecisionMap { get; } = new();
@@ -43,13 +45,14 @@ public sealed class PolicyEngineOptions

    public void Validate()
    {
        Authority.Validate();
        Storage.Validate();
        Workers.Validate();
        ResourceServer.Validate();
        Authority.Validate();
        Storage.Validate();
        Workers.Validate();
        ResourceServer.Validate();
        Compilation.Validate();
        Activation.Validate();
        Telemetry.Validate();
        Entropy.Validate();
        RiskProfile.Validate();
        ExceptionLifecycle.Validate();
    }
@@ -226,8 +229,8 @@ public sealed class PolicyEngineCompilationOptions
}


public sealed class PolicyEngineActivationOptions
{
public sealed class PolicyEngineActivationOptions
{
    /// <summary>
    /// Forces two distinct approvals for every activation regardless of the request payload.
    /// </summary>
@@ -244,12 +247,78 @@ public sealed class PolicyEngineActivationOptions
    public bool EmitAuditLogs { get; set; } = true;

    public void Validate()
    {
    }
}

public sealed class PolicyEngineRiskProfileOptions
{
    {
    }
}

public sealed class PolicyEngineEntropyOptions
{
    /// <summary>
    /// Multiplier K applied to summed layer contributions.
    /// </summary>
    public decimal PenaltyMultiplier { get; set; } = 0.5m;

    /// <summary>
    /// Maximum entropy penalty applied to trust weighting.
    /// </summary>
    public decimal PenaltyCap { get; set; } = 0.3m;

    /// <summary>
    /// Threshold for blocking when the whole-image opaque ratio exceeds this value and provenance is unknown.
    /// </summary>
    public decimal ImageOpaqueBlockThreshold { get; set; } = 0.15m;

    /// <summary>
    /// Threshold for warning when any file/layer opaque ratio exceeds this value.
    /// </summary>
    public decimal FileOpaqueWarnThreshold { get; set; } = 0.30m;

    /// <summary>
    /// Mitigation factor applied when symbols are present and provenance is attested.
    /// </summary>
    public decimal SymbolMitigationFactor { get; set; } = 0.5m;

    /// <summary>
    /// Number of top opaque files to surface in explanations.
    /// </summary>
    public int TopFiles { get; set; } = 5;

    public void Validate()
    {
        if (PenaltyMultiplier < 0)
        {
            throw new InvalidOperationException("Entropy.PenaltyMultiplier must be non-negative.");
        }

        if (PenaltyCap < 0 || PenaltyCap > 1)
        {
            throw new InvalidOperationException("Entropy.PenaltyCap must be between 0 and 1.");
        }

        if (ImageOpaqueBlockThreshold < 0 || ImageOpaqueBlockThreshold > 1)
        {
            throw new InvalidOperationException("Entropy.ImageOpaqueBlockThreshold must be between 0 and 1.");
        }

        if (FileOpaqueWarnThreshold < 0 || FileOpaqueWarnThreshold > 1)
        {
            throw new InvalidOperationException("Entropy.FileOpaqueWarnThreshold must be between 0 and 1.");
        }

        if (SymbolMitigationFactor < 0 || SymbolMitigationFactor > 1)
        {
            throw new InvalidOperationException("Entropy.SymbolMitigationFactor must be between 0 and 1.");
        }

        if (TopFiles <= 0)
        {
            throw new InvalidOperationException("Entropy.TopFiles must be greater than zero.");
        }
    }
}

public sealed class PolicyEngineRiskProfileOptions
{
    /// <summary>
    /// Enables risk profile integration for policy evaluation.
    /// </summary>

@@ -124,10 +124,11 @@ builder.Services.AddSingleton<RiskProfileConfigurationService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Lifecycle.RiskProfileLifecycleService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Scope.ScopeAttachmentService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Overrides.OverrideService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Scoring.IRiskScoringJobStore, StellaOps.Policy.Engine.Scoring.InMemoryRiskScoringJobStore>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Scoring.RiskScoringTriggerService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Simulation.RiskSimulationService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Export.ProfileExportService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Scoring.IRiskScoringJobStore, StellaOps.Policy.Engine.Scoring.InMemoryRiskScoringJobStore>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Scoring.RiskScoringTriggerService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Simulation.RiskSimulationService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Signals.Entropy.EntropyPenaltyCalculator>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Export.ProfileExportService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Events.ProfileEventPublisher>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Events.IExceptionEventPublisher>(sp =>
    new StellaOps.Policy.Engine.Events.LoggingExceptionEventPublisher(

@@ -0,0 +1,143 @@
using System.Text.Json.Serialization;

namespace StellaOps.Policy.Engine.Signals.Entropy;

/// <summary>
/// Summary of opaque ratios per image layer emitted by the scanner.
/// </summary>
public sealed class EntropyLayerSummary
{
    [JsonPropertyName("schema")]
    public string? Schema { get; init; }

    [JsonPropertyName("generatedAt")]
    public DateTimeOffset? GeneratedAt { get; init; }

    [JsonPropertyName("imageDigest")]
    public string? ImageDigest { get; init; }

    [JsonPropertyName("layers")]
    public List<EntropyLayer> Layers { get; init; } = new();

    [JsonPropertyName("imageOpaqueRatio")]
    public decimal? ImageOpaqueRatio { get; init; }

    [JsonPropertyName("entropyPenalty")]
    public decimal? EntropyPenalty { get; init; }
}

/// <summary>
/// Layer-level entropy ratios.
/// </summary>
public sealed class EntropyLayer
{
    [JsonPropertyName("digest")]
    public string? Digest { get; init; }

    [JsonPropertyName("opaqueBytes")]
    public long OpaqueBytes { get; init; }

    [JsonPropertyName("totalBytes")]
    public long TotalBytes { get; init; }

    [JsonPropertyName("opaqueRatio")]
    public decimal? OpaqueRatio { get; init; }

    [JsonPropertyName("indicators")]
    public List<string> Indicators { get; init; } = new();
}

/// <summary>
/// Detailed entropy report for files within a layer.
/// </summary>
public sealed class EntropyReport
{
    [JsonPropertyName("schema")]
    public string? Schema { get; init; }

    [JsonPropertyName("generatedAt")]
    public DateTimeOffset? GeneratedAt { get; init; }

    [JsonPropertyName("imageDigest")]
    public string? ImageDigest { get; init; }

    [JsonPropertyName("layerDigest")]
    public string? LayerDigest { get; init; }

    [JsonPropertyName("files")]
    public List<EntropyFile> Files { get; init; } = new();
}

/// <summary>
/// Per-file entropy metrics.
/// </summary>
public sealed class EntropyFile
{
    [JsonPropertyName("path")]
    public string Path { get; init; } = string.Empty;

    [JsonPropertyName("size")]
    public long Size { get; init; }

    [JsonPropertyName("opaqueBytes")]
    public long OpaqueBytes { get; init; }

    [JsonPropertyName("opaqueRatio")]
    public decimal? OpaqueRatio { get; init; }

    [JsonPropertyName("flags")]
    public List<string> Flags { get; init; } = new();

    [JsonPropertyName("windows")]
    public List<EntropyWindow> Windows { get; init; } = new();
}

/// <summary>
/// Sliding window entropy value.
/// </summary>
public sealed class EntropyWindow
{
    [JsonPropertyName("offset")]
    public long Offset { get; init; }

    [JsonPropertyName("length")]
    public int Length { get; init; }

    [JsonPropertyName("entropy")]
    public decimal Entropy { get; init; }
}

/// <summary>
/// Computed entropy penalty result for policy trust algebra.
/// </summary>
public sealed record EntropyPenaltyResult(
    decimal Penalty,
    decimal RawPenalty,
    bool Capped,
    bool Blocked,
    bool Warned,
    decimal ImageOpaqueRatio,
    IReadOnlyList<EntropyLayerContribution> LayerContributions,
    IReadOnlyList<EntropyTopFile> TopFiles,
    IReadOnlyList<string> ReasonCodes,
    bool ProvenanceAttested);

/// <summary>
/// Contribution of a layer to the final penalty.
/// </summary>
public sealed record EntropyLayerContribution(
    string LayerDigest,
    decimal OpaqueRatio,
    decimal Contribution,
    bool Mitigated,
    IReadOnlyList<string> Indicators);

/// <summary>
/// Highest-entropy files for explanations.
/// </summary>
public sealed record EntropyTopFile(
    string Path,
    decimal OpaqueRatio,
    long OpaqueBytes,
    long Size,
    IReadOnlyList<string> Flags);
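For orientation before the calculator below, hand-written samples of the two scanner payloads these models bind to. The values are invented (they echo the unit-test fixtures later in this commit) and are shown as Python literals for brevity:

```python
# Illustrative inputs only; values are made up but the key names match the
# EntropyLayerSummary / EntropyReport models above.
layer_summary = {
    "schema": "stellaops.entropy/layer-summary@1",
    "imageDigest": "sha256:img",
    "imageOpaqueRatio": 0.18,
    "layers": [
        {"digest": "sha256:l1", "opaqueBytes": 2306867, "totalBytes": 10485760,
         "opaqueRatio": 0.22, "indicators": ["packed", "no-symbols"]},
    ],
}

entropy_report = {
    "schema": "stellaops.entropy/report@1",
    "layerDigest": "sha256:l1",
    "files": [
        {"path": "/opt/app/libblob.so", "size": 5242880, "opaqueBytes": 1342177,
         "opaqueRatio": 0.25, "flags": ["stripped"],
         "windows": [{"offset": 0, "length": 4096, "entropy": 7.45}]},
    ],
}
```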
@@ -0,0 +1,280 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Telemetry;

namespace StellaOps.Policy.Engine.Signals.Entropy;

/// <summary>
/// Computes entropy penalties from scanner outputs (`entropy.report.json`, `layer_summary.json`)
/// and maps them into trust-algebra friendly signals.
/// </summary>
public sealed class EntropyPenaltyCalculator
{
    private readonly PolicyEngineEntropyOptions _options;
    private readonly ILogger<EntropyPenaltyCalculator> _logger;
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true
    };

    public EntropyPenaltyCalculator(
        IOptions<PolicyEngineOptions> options,
        ILogger<EntropyPenaltyCalculator> logger)
    {
        _options = options?.Value?.Entropy ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Compute an entropy penalty from JSON payloads.
    /// </summary>
    /// <param name="layerSummaryJson">Contents of `layer_summary.json`.</param>
    /// <param name="entropyReportJson">Optional contents of `entropy.report.json`.</param>
    /// <param name="provenanceAttested">Whether provenance for the image is attested.</param>
    public EntropyPenaltyResult ComputeFromJson(
        string layerSummaryJson,
        string? entropyReportJson = null,
        bool provenanceAttested = false)
    {
        if (string.IsNullOrWhiteSpace(layerSummaryJson))
        {
            throw new ArgumentException("layerSummaryJson is required", nameof(layerSummaryJson));
        }

        var summary = Deserialize<EntropyLayerSummary>(layerSummaryJson)
            ?? throw new InvalidOperationException("Failed to parse layer_summary.json");

        EntropyReport? report = null;
        if (!string.IsNullOrWhiteSpace(entropyReportJson))
        {
            report = Deserialize<EntropyReport>(entropyReportJson);
        }

        return Compute(summary, report, provenanceAttested);
    }

    /// <summary>
    /// Compute an entropy penalty from deserialized models.
    /// </summary>
    public EntropyPenaltyResult Compute(
        EntropyLayerSummary summary,
        EntropyReport? report = null,
        bool provenanceAttested = false)
    {
        ArgumentNullException.ThrowIfNull(summary);

        var layers = summary.Layers ?? new List<EntropyLayer>();
        var orderedLayers = layers
            .OrderBy(l => l.Digest ?? string.Empty, StringComparer.Ordinal)
            .ToList();

        var imageBytes = orderedLayers.Sum(l => Math.Max(0m, (decimal)l.TotalBytes));
        var imageOpaqueRatio = summary.ImageOpaqueRatio ?? ComputeImageOpaqueRatio(orderedLayers, imageBytes);

        var contributions = new List<EntropyLayerContribution>(orderedLayers.Count);
        decimal contributionSum = 0m;
        var reasonCodes = new List<string>();
        var anyMitigated = false;

        foreach (var layer in orderedLayers)
        {
            var indicators = NormalizeIndicators(layer.Indicators);
            var layerRatio = ResolveOpaqueRatio(layer);
            var layerWeight = imageBytes > 0 ? SafeDivide(layer.TotalBytes, imageBytes) : 0m;
            var mitigated = provenanceAttested && HasSymbols(indicators);
            var effectiveRatio = mitigated ? layerRatio * _options.SymbolMitigationFactor : layerRatio;
            var contribution = Math.Round(effectiveRatio * layerWeight, 6, MidpointRounding.ToZero);

            contributions.Add(new EntropyLayerContribution(
                layer.Digest ?? "unknown",
                layerRatio,
                contribution,
                mitigated,
                indicators));

            contributionSum += contribution;
            anyMitigated |= mitigated;
        }

        var rawPenalty = Math.Round(contributionSum * _options.PenaltyMultiplier, 6, MidpointRounding.ToZero);
        var cappedPenalty = Math.Min(_options.PenaltyCap, rawPenalty);
        var penalty = Math.Round(cappedPenalty, 4, MidpointRounding.ToZero);
        var capped = rawPenalty > _options.PenaltyCap;

        var warnTriggered = !string.IsNullOrWhiteSpace(FindFirstWarnReason(orderedLayers, report));
        var blocked = imageOpaqueRatio > _options.ImageOpaqueBlockThreshold && !provenanceAttested;
        var warn = !blocked && warnTriggered;

        var topFiles = BuildTopFiles(report);

        PopulateReasonCodes(
            reasonCodes,
            imageOpaqueRatio,
            orderedLayers,
            report,
            capped,
            anyMitigated,
            provenanceAttested,
            blocked,
            warn);

        PolicyEngineTelemetry.RecordEntropyPenalty(
            (double)penalty,
            blocked ? "block" : warn ? "warn" : "ok",
            (double)imageOpaqueRatio,
            topFiles.Count > 0 ? (double?)topFiles[0].OpaqueRatio : null);

        _logger.LogDebug(
            "Computed entropy penalty {Penalty:F4} (raw {Raw:F4}, imageOpaqueRatio={ImageOpaqueRatio:F3}, blocked={Blocked}, warn={Warn}, capped={Capped})",
            penalty,
            rawPenalty,
            imageOpaqueRatio,
            blocked,
            warn,
            capped);

        return new EntropyPenaltyResult(
            Penalty: penalty,
            RawPenalty: rawPenalty,
            Capped: capped,
            Blocked: blocked,
            Warned: warn,
            ImageOpaqueRatio: imageOpaqueRatio,
            LayerContributions: contributions,
            TopFiles: topFiles,
            ReasonCodes: reasonCodes,
            ProvenanceAttested: provenanceAttested);
    }

    private static decimal ComputeImageOpaqueRatio(IEnumerable<EntropyLayer> layers, decimal imageBytes)
    {
        if (imageBytes <= 0m)
        {
            return 0m;
        }

        var opaqueBytes = layers.Sum(l => Math.Max(0m, (decimal)l.OpaqueBytes));
        return Math.Round(SafeDivide(opaqueBytes, imageBytes), 6, MidpointRounding.ToZero);
    }

    private static decimal ResolveOpaqueRatio(EntropyLayer layer)
    {
        if (layer.TotalBytes > 0)
        {
            return Math.Round(SafeDivide(layer.OpaqueBytes, layer.TotalBytes), 6, MidpointRounding.ToZero);
        }

        return Math.Max(0m, layer.OpaqueRatio ?? 0m);
    }

    private static decimal SafeDivide(decimal numerator, decimal denominator)
        => denominator <= 0 ? 0 : numerator / denominator;

    private static bool HasSymbols(IReadOnlyCollection<string> indicators)
    {
        return indicators.Any(i =>
            i.Equals("symbols", StringComparison.OrdinalIgnoreCase) ||
            i.Equals("has-symbols", StringComparison.OrdinalIgnoreCase) ||
            i.Equals("debug-symbols", StringComparison.OrdinalIgnoreCase) ||
            i.Equals("symbols-present", StringComparison.OrdinalIgnoreCase));
    }

    private static IReadOnlyList<string> NormalizeIndicators(IEnumerable<string> indicators)
    {
        return indicators
            .Where(indicator => !string.IsNullOrWhiteSpace(indicator))
            .Select(indicator => indicator.Trim())
            .OrderBy(indicator => indicator, StringComparer.OrdinalIgnoreCase)
            .ToArray();
    }

    private IReadOnlyList<EntropyTopFile> BuildTopFiles(EntropyReport? report)
    {
        if (report?.Files == null || report.Files.Count == 0)
        {
            return Array.Empty<EntropyTopFile>();
        }

        return report.Files
            .Where(f => f.OpaqueRatio.HasValue)
            .OrderByDescending(f => f.OpaqueRatio ?? 0m)
            .ThenByDescending(f => f.OpaqueBytes)
            .ThenBy(f => f.Path, StringComparer.Ordinal)
            .Take(_options.TopFiles)
            .Select(f => new EntropyTopFile(
                Path: f.Path,
                OpaqueRatio: Math.Round(f.OpaqueRatio ?? 0m, 6, MidpointRounding.ToZero),
                OpaqueBytes: f.OpaqueBytes,
                Size: f.Size,
                Flags: NormalizeIndicators(f.Flags)))
            .ToList();
    }

    private string? FindFirstWarnReason(IEnumerable<EntropyLayer> layers, EntropyReport? report)
    {
        var layerHit = layers.Any(l => ResolveOpaqueRatio(l) > _options.FileOpaqueWarnThreshold);
        if (layerHit)
        {
            return "layer_opaque_ratio";
        }

        if (report?.Files is { Count: > 0 })
        {
            var fileHit = report.Files.Any(f => (f.OpaqueRatio ?? 0m) > _options.FileOpaqueWarnThreshold);
            if (fileHit)
            {
                return "file_opaque_ratio";
            }
        }

        return null;
    }

    private void PopulateReasonCodes(
        List<string> reasons,
        decimal imageOpaqueRatio,
        IReadOnlyCollection<EntropyLayer> layers,
        EntropyReport? report,
        bool capped,
        bool mitigated,
        bool provenanceAttested,
        bool blocked,
        bool warn)
    {
        if (blocked)
        {
            reasons.Add("image_opaque_ratio_exceeds_threshold");
            if (!provenanceAttested)
            {
                reasons.Add("provenance_unknown");
            }
        }

        if (warn)
        {
            reasons.Add("file_opaque_ratio_exceeds_threshold");
        }

        if (capped)
        {
            reasons.Add("penalty_capped");
        }

        if (mitigated && provenanceAttested)
        {
            reasons.Add("symbols_mitigated");
        }

        if (imageOpaqueRatio <= 0 && layers.Count == 0 && (report?.Files.Count ?? 0) == 0)
        {
            reasons.Add("no_entropy_data");
        }
    }

    private static T? Deserialize<T>(string json)
    {
        return JsonSerializer.Deserialize<T>(json, JsonOptions);
    }
}
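To make the trust-algebra mapping concrete, a small arithmetic sketch of what `Compute` does under the default options (PenaltyMultiplier K = 0.5, PenaltyCap = 0.3, SymbolMitigationFactor = 0.5). It reproduces the ~0.1 penalty asserted by the first unit test further down; the per-step 6-decimal truncation in the C# code is omitted here.

```python
# Sketch of the penalty arithmetic in EntropyPenaltyCalculator.Compute,
# using the two-layer fixture from the unit tests below.
K, CAP, MITIGATION = 0.5, 0.3, 0.5

layers = [  # (opaque_bytes, total_bytes, has_symbols)
    (2_306_867, 10_485_760, False),
    (0, 1_048_576, True),
]
provenance_attested = False

image_bytes = sum(total for _, total, _ in layers)
raw = 0.0
for opaque, total, has_symbols in layers:
    ratio = opaque / total           # per-layer opaque ratio
    weight = total / image_bytes     # size-weighted contribution
    if provenance_attested and has_symbols:
        ratio *= MITIGATION          # symbols + attested provenance mitigate
    raw += ratio * weight

penalty = min(CAP, K * raw)          # cap, then round for reporting
print(round(penalty, 4))             # ~0.1, matching InRange(0.099, 0.101)
```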
@@ -55,12 +55,12 @@ public static class PolicyEngineTelemetry
        unit: "overrides",
        description: "Total number of VEX overrides applied during policy evaluation.");

    // Counter: policy_compilation_total{outcome}
    private static readonly Counter<long> PolicyCompilationCounter =
        Meter.CreateCounter<long>(
            "policy_compilation_total",
            unit: "compilations",
            description: "Total number of policy compilations attempted.");
    // Counter: policy_compilation_total{outcome}
    private static readonly Counter<long> PolicyCompilationCounter =
        Meter.CreateCounter<long>(
            "policy_compilation_total",
            unit: "compilations",
            description: "Total number of policy compilations attempted.");

    // Histogram: policy_compilation_seconds
    private static readonly Histogram<double> PolicyCompilationSecondsHistogram =
@@ -70,17 +70,73 @@ public static class PolicyEngineTelemetry
        description: "Duration of policy compilation.");

    // Counter: policy_simulation_total{tenant,outcome}
    private static readonly Counter<long> PolicySimulationCounter =
        Meter.CreateCounter<long>(
            "policy_simulation_total",
            unit: "simulations",
            description: "Total number of policy simulations executed.");

    #region Golden Signals - Latency

    // Histogram: policy_api_latency_seconds{endpoint,method,status}
    private static readonly Histogram<double> ApiLatencyHistogram =
        Meter.CreateHistogram<double>(
    private static readonly Counter<long> PolicySimulationCounter =
        Meter.CreateCounter<long>(
            "policy_simulation_total",
            unit: "simulations",
            description: "Total number of policy simulations executed.");

    #region Entropy Metrics

    // Counter: policy_entropy_penalty_total{outcome}
    private static readonly Counter<long> EntropyPenaltyCounter =
        Meter.CreateCounter<long>(
            "policy_entropy_penalty_total",
            unit: "penalties",
            description: "Total entropy penalties computed from scanner evidence.");

    // Histogram: policy_entropy_penalty_value{outcome}
    private static readonly Histogram<double> EntropyPenaltyHistogram =
        Meter.CreateHistogram<double>(
            "policy_entropy_penalty_value",
            unit: "ratio",
            description: "Entropy penalty values (after cap).");

    // Histogram: policy_entropy_image_opaque_ratio{outcome}
    private static readonly Histogram<double> EntropyImageOpaqueRatioHistogram =
        Meter.CreateHistogram<double>(
            "policy_entropy_image_opaque_ratio",
            unit: "ratio",
            description: "Image opaque ratios observed in layer summaries.");

    // Histogram: policy_entropy_top_file_ratio{outcome}
    private static readonly Histogram<double> EntropyTopFileRatioHistogram =
        Meter.CreateHistogram<double>(
            "policy_entropy_top_file_ratio",
            unit: "ratio",
            description: "Opaque ratio of the top offending file when present.");

    /// <summary>
    /// Records an entropy penalty computation.
    /// </summary>
    public static void RecordEntropyPenalty(
        double penalty,
        string outcome,
        double imageOpaqueRatio,
        double? topFileOpaqueRatio = null)
    {
        var tags = new TagList
        {
            { "outcome", NormalizeTag(outcome) },
        };

        EntropyPenaltyCounter.Add(1, tags);
        EntropyPenaltyHistogram.Record(penalty, tags);
        EntropyImageOpaqueRatioHistogram.Record(imageOpaqueRatio, tags);

        if (topFileOpaqueRatio.HasValue)
        {
            EntropyTopFileRatioHistogram.Record(topFileOpaqueRatio.Value, tags);
        }
    }

    #endregion

    #region Golden Signals - Latency

    // Histogram: policy_api_latency_seconds{endpoint,method,status}
    private static readonly Histogram<double> ApiLatencyHistogram =
        Meter.CreateHistogram<double>(
            "policy_api_latency_seconds",
            unit: "s",
            description: "API request latency by endpoint.");

@@ -0,0 +1,115 @@
using System.Collections.Generic;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Signals.Entropy;
using Xunit;
using OptionsFactory = Microsoft.Extensions.Options.Options;

namespace StellaOps.Policy.Engine.Tests.Signals;

public sealed class EntropyPenaltyCalculatorTests
{
    private readonly EntropyPenaltyCalculator _calculator = new(
        OptionsFactory.Create(new PolicyEngineOptions()),
        NullLogger<EntropyPenaltyCalculator>.Instance);

    [Fact]
    public void ComputeFromJson_ComputesPenaltyAndBlock_WhenImageOpaqueHighAndProvenanceUnknown()
    {
        var summaryJson = """
        {
          "schema": "stellaops.entropy/layer-summary@1",
          "imageOpaqueRatio": 0.18,
          "layers": [
            { "digest": "sha256:l1", "opaqueBytes": 2306867, "totalBytes": 10485760, "opaqueRatio": 0.22, "indicators": ["packed", "no-symbols"] },
            { "digest": "sha256:l2", "opaqueBytes": 0, "totalBytes": 1048576, "opaqueRatio": 0.0, "indicators": ["symbols"] }
          ]
        }
        """;

        var reportJson = """
        {
          "schema": "stellaops.entropy/report@1",
          "files": [
            { "path": "/opt/app/libblob.so", "size": 5242880, "opaqueBytes": 1342177, "opaqueRatio": 0.25, "flags": ["stripped", "section:.UPX0"], "windows": [ { "offset": 0, "length": 4096, "entropy": 7.45 } ] },
            { "path": "/opt/app/ok.bin", "size": 1024, "opaqueBytes": 0, "opaqueRatio": 0.0, "flags": [] }
          ]
        }
        """;

        var result = _calculator.ComputeFromJson(summaryJson, reportJson, provenanceAttested: false);

        Assert.True(result.Blocked);
        Assert.False(result.Warned);
        Assert.InRange(result.Penalty, 0.099m, 0.101m); // ~0.1 after K=0.5
        Assert.Contains("image_opaque_ratio_exceeds_threshold", result.ReasonCodes);
        Assert.Contains(result.TopFiles, tf => tf.Path == "/opt/app/libblob.so" && tf.OpaqueRatio == 0.25m);
    }

    [Fact]
    public void Compute_AppliesMitigationAndCap_WhenSymbolsPresentAndProvenanceAttested()
    {
        var summary = new EntropyLayerSummary
        {
            ImageOpaqueRatio = 0.9m,
            Layers = new List<EntropyLayer>
            {
                new()
                {
                    Digest = "sha256:layer",
                    OpaqueBytes = 900,
                    TotalBytes = 1000,
                    Indicators = new List<string> { "symbols" }
                }
            }
        };

        var report = new EntropyReport
        {
            Files = new List<EntropyFile>
            {
                new()
                {
                    Path = "/bin/high.bin",
                    Size = 1000,
                    OpaqueBytes = 900,
                    OpaqueRatio = 0.9m,
                    Flags = new List<string> { "packed" }
                }
            }
        };

        var result = _calculator.Compute(summary, report, provenanceAttested: true);

        Assert.False(result.Blocked); // provenance attested suppresses block
        Assert.False(result.Capped);
        Assert.InRange(result.Penalty, 0.224m, 0.226m); // mitigation keeps the penalty below the cap
        Assert.Contains("symbols_mitigated", result.ReasonCodes);
        Assert.DoesNotContain("penalty_capped", result.ReasonCodes);
    }

    [Fact]
    public void Compute_WarnsWhenLayerExceedsThresholdWithoutReport()
    {
        var summary = new EntropyLayerSummary
        {
            ImageOpaqueRatio = 0.05m,
            Layers = new List<EntropyLayer>
            {
                new()
                {
                    Digest = "sha256:l1",
                    OpaqueBytes = 40,
                    TotalBytes = 100,
                    Indicators = new List<string> { "packed" }
                }
            }
        };

        var result = _calculator.Compute(summary, report: null, provenanceAttested: false);

        Assert.False(result.Blocked);
        Assert.True(result.Warned);
        Assert.Contains("file_opaque_ratio_exceeds_threshold", result.ReasonCodes);
    }
}
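Note: the asserted values are consistent with a simple multiplicative shape; this is a reviewer's sketch, not the calculator's implementation (which is outside this diff). `K`, the mitigation factor, and their values are inferred from the test comments.

```csharp
// Illustrative only - inferred from the assertions, not from the calculator source.
// 0.9 (opaque ratio) * 0.5 (K) * 0.5 (assumed "symbols" mitigation) = 0.225,
// which matches Assert.InRange(result.Penalty, 0.224m, 0.226m).
static decimal SketchPenalty(decimal opaqueRatio, bool symbolsMitigated, decimal k = 0.5m)
    => opaqueRatio * k * (symbolsMitigated ? 0.5m : 1.0m);
```

The blocked case asserting ~0.1 (rather than 0.18 × 0.5 = 0.09) suggests layer- or file-level ratios also feed the effective ratio; that aggregation is not shown here.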
@@ -1,8 +1,6 @@
using System;
using System.Collections.Generic;
using System.Linq;
-using System.Security.Cryptography;
-using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -37,33 +35,40 @@ public sealed class ReachabilityBuildStageExecutor : IScanStageExecutor
        }

        var nodeMap = entryTrace.Nodes.ToDictionary(n => n.Id);
+       var symbolMap = new Dictionary<int, string>(nodeMap.Count);

        var unionNodes = new List<ReachabilityUnionNode>(entryTrace.Nodes.Length);
        foreach (var node in entryTrace.Nodes)
        {
-           var symbolId = ComputeSymbolId("shell", node.DisplayName, node.Kind.ToString());
+           var command = FormatCommand(node);
+           var symbolId = SymbolId.ForShell(node.DisplayName, command);
+           symbolMap[node.Id] = symbolId;
            var source = node.Evidence is null
                ? null
                : new ReachabilitySource("static", "entrytrace", node.Evidence.Path);

+           var attributes = new Dictionary<string, string>
+           {
+               ["code_id"] = CodeId.FromSymbolId(symbolId)
+           };
+
            unionNodes.Add(new ReachabilityUnionNode(
                SymbolId: symbolId,
                Lang: "shell",
                Kind: node.Kind.ToString().ToLowerInvariant(),
                Display: node.DisplayName,
-               Source: source));
+               Source: source,
+               Attributes: attributes));
        }

        var unionEdges = new List<ReachabilityUnionEdge>(entryTrace.Edges.Length);
        foreach (var edge in entryTrace.Edges)
        {
-           if (!nodeMap.TryGetValue(edge.FromNodeId, out var fromNode) || !nodeMap.TryGetValue(edge.ToNodeId, out var toNode))
+           if (!symbolMap.TryGetValue(edge.FromNodeId, out var fromId) || !symbolMap.TryGetValue(edge.ToNodeId, out var toId))
            {
                continue;
            }

-           var fromId = ComputeSymbolId("shell", fromNode.DisplayName, fromNode.Kind.ToString());
-           var toId = ComputeSymbolId("shell", toNode.DisplayName, toNode.Kind.ToString());
            unionEdges.Add(new ReachabilityUnionEdge(
                From: fromId,
                To: toId,
@@ -78,15 +83,13 @@ public sealed class ReachabilityBuildStageExecutor : IScanStageExecutor
        return ValueTask.CompletedTask;
    }

-   private static string ComputeSymbolId(string lang, string display, string kind)
+   private static string FormatCommand(EntryTraceNode node)
    {
-       using var sha = SHA256.Create();
-       var input = Encoding.UTF8.GetBytes((display ?? string.Empty) + "|" + (kind ?? string.Empty));
-       var hash = sha.ComputeHash(input);
-       var base64 = Convert.ToBase64String(hash)
-           .TrimEnd('=')
-           .Replace('+', '-')
-           .Replace('/', '_');
-       return $"sym:{lang}:{base64}";
+       if (!node.Arguments.IsDefaultOrEmpty && node.Arguments.Length > 0)
+       {
+           return string.Join(' ', node.Arguments);
+       }
+
+       return node.Kind.ToString();
    }
}
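Note: the executor now derives shell symbol IDs from the display name plus the formatted command via `SymbolId.ForShell`, instead of hashing display/kind locally. A quick sketch of the resulting stability (the `SymbolIdTests` added later in this commit assert the same):

```csharp
// Identical display name + command always yields the same symbol ID,
// so re-scans of the same entrypoint produce stable graph nodes.
var a = SymbolId.ForShell("/entrypoint.sh", "python -m app");
var b = SymbolId.ForShell("/entrypoint.sh", "python -m app");
// a == b
```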
@@ -25,6 +25,22 @@ public static class CodeId
        return Build("dotnet", tuple);
    }

    /// <summary>
    /// Creates a binary code-id using canonical address + length tuple.
    /// This aligns with function-level evidence expectations for richgraph-v1.
    /// </summary>
    /// <param name="format">Binary format (elf, pe, macho).</param>
    /// <param name="fileHash">Digest of the binary or object file.</param>
    /// <param name="address">Virtual address (hex or decimal). Normalized to 0x prefix and lower-case.</param>
    /// <param name="lengthBytes">Optional length in bytes.</param>
    /// <param name="section">Optional section name.</param>
    /// <param name="codeBlockHash">Optional hash of the code block for stripped binaries.</param>
    public static string ForBinarySegment(string format, string fileHash, string address, long? lengthBytes = null, string? section = null, string? codeBlockHash = null)
    {
        var tuple = $"{Norm(format)}\0{Norm(fileHash)}\0{NormalizeAddress(address)}\0{NormalizeLength(lengthBytes)}\0{Norm(section)}\0{Norm(codeBlockHash)}";
        return Build("binary", tuple);
    }

    public static string ForNode(string packageName, string entryPath)
    {
        var tuple = $"{Norm(packageName)}\0{Norm(entryPath)}";
@@ -48,5 +64,48 @@ public static class CodeId
        return $"code:{lang}:{base64}";
    }

    private static string NormalizeAddress(string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return "0x0";
        }

        var addrText = value.Trim();
        var isHex = addrText.StartsWith("0x", StringComparison.OrdinalIgnoreCase);
        if (isHex)
        {
            addrText = addrText[2..];
        }

        if (long.TryParse(addrText, isHex ? System.Globalization.NumberStyles.HexNumber : System.Globalization.NumberStyles.Integer, System.Globalization.CultureInfo.InvariantCulture, out var addrValue))
        {
            if (addrValue < 0)
            {
                addrValue = 0;
            }

            return $"0x{addrValue:x}";
        }

        addrText = addrText.TrimStart('0');
        if (addrText.Length == 0)
        {
            addrText = "0";
        }

        return $"0x{addrText.ToLowerInvariant()}";
    }

    private static string NormalizeLength(long? value)
    {
        if (value is null or <= 0)
        {
            return "unknown";
        }

        return value.Value.ToString("D", System.Globalization.CultureInfo.InvariantCulture);
    }

    private static string Norm(string? value) => (value ?? string.Empty).Trim();
}
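Note: the net effect of the normalization helpers, using inputs taken from the `SymbolIdTests` added later in this commit; hex with any prefix casing or zero padding and plain decimal collapse to the same canonical tuple:

```csharp
// "0X0010" (hex 16, upper-case prefix, zero-padded) and "16" (decimal)
// both normalize to address "0x10", so the resulting code-ids are identical.
var cid1 = CodeId.ForBinarySegment("elf", "sha256:abc", "0X0010", 64, ".text");
var cid2 = CodeId.ForBinarySegment("elf", "sha256:abc", "16", 64, ".text");
// cid1 == cid2; a different address or length yields a different id.
```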
@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Security.Cryptography;

namespace StellaOps.Scanner.Reachability;

@@ -132,14 +132,21 @@ public static class SymbolId
    }

    /// <summary>
-   /// Creates a binary symbol ID from ELF/PE/Mach-O components.
+   /// Creates a binary symbol ID from ELF/PE/Mach-O components (legacy overload).
    /// </summary>
    /// <param name="buildId">Binary build-id (GNU build-id, PE GUID, Mach-O UUID).</param>
    /// <param name="section">Section name (e.g., ".text", ".dynsym").</param>
    /// <param name="symbolName">Symbol name from symbol table.</param>
    public static string ForBinary(string buildId, string section, string symbolName)
+       => ForBinaryAddressed(buildId, section, string.Empty, symbolName, "static", null);

+   /// <summary>
+   /// Creates a binary symbol ID that includes file hash, section, address, and linkage.
+   /// Aligns with {file:hash, section, addr, name, linkage} tuple used by richgraph-v1.
+   /// </summary>
+   public static string ForBinaryAddressed(string fileHash, string section, string address, string symbolName, string linkage, string? codeBlockHash = null)
    {
-       var tuple = $"{Norm(buildId)}\0{Norm(section)}\0{Norm(symbolName)}";
+       var tuple = $"{Norm(fileHash)}\0{Norm(section)}\0{NormalizeAddress(address)}\0{Norm(symbolName)}\0{Norm(linkage)}\0{Norm(codeBlockHash)}";
        return Build(Lang.Binary, tuple);
    }

@@ -219,6 +226,40 @@ public static class SymbolId
        return $"sym:{lang}:{hash}";
    }

    private static string NormalizeAddress(string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return "0x0";
        }

        var addrText = value.Trim();
        var isHex = addrText.StartsWith("0x", StringComparison.OrdinalIgnoreCase);
        if (isHex)
        {
            addrText = addrText[2..];
        }

        if (long.TryParse(addrText, isHex ? System.Globalization.NumberStyles.HexNumber : System.Globalization.NumberStyles.Integer, System.Globalization.CultureInfo.InvariantCulture, out var addrValue))
        {
            if (addrValue < 0)
            {
                addrValue = 0;
            }

            return $"0x{addrValue:x}";
        }

        // Fallback to normalized string representation
        addrText = addrText.TrimStart('0');
        if (addrText.Length == 0)
        {
            addrText = "0";
        }

        return $"0x{addrText.ToLowerInvariant()}";
    }

    private static string ComputeFragment(string tuple)
    {
        var bytes = Encoding.UTF8.GetBytes(tuple);
@@ -16,8 +16,8 @@ public class RichGraphWriterTests
        var union = new ReachabilityUnionGraph(
            Nodes: new[]
            {
-               new ReachabilityUnionNode("sym:dotnet:B", "dotnet", "method", display: "B"),
-               new ReachabilityUnionNode("sym:dotnet:A", "dotnet", "method", display: "A")
+               new ReachabilityUnionNode("sym:dotnet:B", "dotnet", "method", "B"),
+               new ReachabilityUnionNode("sym:dotnet:A", "dotnet", "method", "A")
            },
            Edges: new[]
            {
@@ -0,0 +1,40 @@
using StellaOps.Scanner.Reachability;
using Xunit;

namespace StellaOps.Scanner.Reachability.Tests;

public class SymbolIdTests
{
    [Fact]
    public void ForBinaryAddressed_NormalizesAddressAndKeepsLinkage()
    {
        var id1 = SymbolId.ForBinaryAddressed("sha256:deadbeef", ".text", "0x0040", "foo", "weak");
        var id2 = SymbolId.ForBinaryAddressed("sha256:deadbeef", ".text", "0x40", "foo", "weak");

        Assert.Equal(id1, id2);

        var id3 = SymbolId.ForBinaryAddressed("sha256:deadbeef", ".text", "40", "foo", "strong");
        Assert.NotEqual(id1, id3);
    }

    [Fact]
    public void CodeIdBinarySegment_NormalizesAddressAndLength()
    {
        var cid1 = CodeId.ForBinarySegment("elf", "sha256:abc", "0X0010", 64, ".text");
        var cid2 = CodeId.ForBinarySegment("elf", "sha256:abc", "16", 64, ".text");

        Assert.Equal(cid1, cid2);

        var cid3 = CodeId.ForBinarySegment("elf", "sha256:abc", "0x20", 32, ".text");
        Assert.NotEqual(cid1, cid3);
    }

    [Fact]
    public void SymbolIdForShell_RemainsStableForSameCommand()
    {
        var id1 = SymbolId.ForShell("/entrypoint.sh", "python -m app");
        var id2 = SymbolId.ForShell("/entrypoint.sh", "python -m app");

        Assert.Equal(id1, id2);
    }
}
@@ -66,6 +66,8 @@ describe('ConsoleStatusClient', () => {
    const req = httpMock.expectOne('/console/status');
    expect(req.request.method).toBe('GET');
    expect(req.request.headers.get('X-StellaOps-Tenant')).toBe('tenant-dev');
+   expect(req.request.headers.get('X-Stella-Trace-Id')).toBeTruthy();
+   expect(req.request.headers.get('X-Stella-Request-Id')).toBeTruthy();
    req.flush(sample);
  });
@@ -75,7 +77,8 @@ describe('ConsoleStatusClient', () => {

    expect(eventSourceFactory).toHaveBeenCalled();
    const url = eventSourceFactory.calls.mostRecent().args[0];
-   expect(url).toBe('/console/runs/run-123/stream?tenant=tenant-dev');
+   expect(url).toContain('/console/runs/run-123/stream?tenant=tenant-dev');
+   expect(url).toContain('traceId=');

    // Simulate incoming message
    const fakeSource = eventSourceFactory.calls.mostRecent().returnValue as unknown as FakeEventSource;
@@ -5,6 +5,7 @@ import { map } from 'rxjs/operators';

import { AuthSessionStore } from '../auth/auth-session.store';
import { ConsoleRunEventDto, ConsoleStatusDto } from './console-status.models';
+import { generateTraceId } from './trace.util';

export const CONSOLE_API_BASE_URL = new InjectionToken<string>('CONSOLE_API_BASE_URL');

@@ -29,9 +30,15 @@ export class ConsoleStatusClient {
  /**
   * Poll console status (queue lag, backlog, run counts).
   */
- getStatus(tenantId?: string): Observable<ConsoleStatusDto> {
+ getStatus(tenantId?: string, traceId?: string): Observable<ConsoleStatusDto> {
    const tenant = this.resolveTenant(tenantId);
-   const headers = new HttpHeaders({ 'X-StellaOps-Tenant': tenant });
+   const trace = traceId ?? generateTraceId();
+   const headers = new HttpHeaders({
+     'X-StellaOps-Tenant': tenant,
+     'X-Stella-Trace-Id': trace,
+     'X-Stella-Request-Id': trace,
+   });

    return this.http.get<ConsoleStatusDto>(`${this.baseUrl}/status`, { headers }).pipe(
      map((dto) => ({
        ...dto,
@@ -50,9 +57,10 @@ export class ConsoleStatusClient {
   * Subscribe to streaming updates for a specific run via SSE.
   * Caller is responsible for unsubscribing to close the connection.
   */
- streamRun(runId: string, tenantId?: string): Observable<ConsoleRunEventDto> {
+ streamRun(runId: string, tenantId?: string, traceId?: string): Observable<ConsoleRunEventDto> {
    const tenant = this.resolveTenant(tenantId);
-   const params = new HttpParams().set('tenant', tenant);
+   const trace = traceId ?? generateTraceId();
+   const params = new HttpParams().set('tenant', tenant).set('traceId', trace);
    const url = `${this.baseUrl}/runs/${encodeURIComponent(runId)}/stream?${params.toString()}`;

    return new Observable<ConsoleRunEventDto>((observer) => {
@@ -1,6 +1,6 @@
-import { HttpClient, HttpHeaders, HttpParams } from '@angular/common/http';
+import { HttpClient, HttpErrorResponse, HttpHeaders, HttpParams } from '@angular/common/http';
import { Inject, Injectable, InjectionToken } from '@angular/core';
-import { Observable, map } from 'rxjs';
+import { Observable, catchError, map, throwError } from 'rxjs';

import { AuthSessionStore } from '../auth/auth-session.store';
import { RiskApi } from './risk.client';
@@ -9,6 +9,12 @@ import { generateTraceId } from './trace.util';

export const RISK_API_BASE_URL = new InjectionToken<string>('RISK_API_BASE_URL');

+export class RateLimitError extends Error {
+  constructor(public readonly retryAfterMs?: number) {
+    super('rate-limit');
+  }
+}
+
@Injectable({ providedIn: 'root' })
export class RiskHttpClient implements RiskApi {
  constructor(
@@ -35,7 +41,8 @@ export class RiskHttpClient implements RiskApi {
        ...page,
        page: page.page ?? 1,
        pageSize: page.pageSize ?? 20,
-     }))
+     })),
+     catchError((err) => throwError(() => this.normalizeError(err)))
    );
  }
@@ -50,10 +57,23 @@ export class RiskHttpClient implements RiskApi {
      map((stats) => ({
        countsBySeverity: stats.countsBySeverity,
        lastComputation: stats.lastComputation ?? '1970-01-01T00:00:00Z',
-     }))
+     })),
+     catchError((err) => throwError(() => this.normalizeError(err)))
    );
  }

+ private normalizeError(err: unknown): Error {
+   if (err instanceof RateLimitError) return err;
+   if (err instanceof HttpErrorResponse && err.status === 429) {
+     const retryAfter = err.headers.get('Retry-After');
+     const retryAfterMs = retryAfter ? Number(retryAfter) * 1000 : undefined;
+     return new RateLimitError(Number.isFinite(retryAfterMs) ? retryAfterMs : undefined);
+   }
+
+   if (err instanceof Error) return err;
+   return new Error('Risk API request failed');
+ }
+
  private buildHeaders(tenantId: string, projectId?: string, traceId?: string): HttpHeaders {
    let headers = new HttpHeaders({ 'X-Stella-Tenant': tenantId });
    if (projectId) headers = headers.set('X-Stella-Project', projectId);
@@ -2,6 +2,7 @@ import { TestBed } from '@angular/core/testing';
import { of, throwError } from 'rxjs';

import { RISK_API } from './risk.client';
+import { RateLimitError } from './risk-http.client';
import { RiskQueryOptions, RiskResultPage, RiskStats } from './risk.models';
import { RiskStore } from './risk.store';

@@ -47,6 +48,14 @@ describe('RiskStore', () => {
    expect(store.error()).toBe('boom');
  });

+ it('reports rate limit errors with retry hint', () => {
+   apiSpy.list.and.returnValue(throwError(() => new RateLimitError(5000)));
+
+   store.fetchList(defaultOptions);
+
+   expect(store.error()).toContain('retry after 5s');
+ });
+
  it('stores stats results', () => {
    const stats: RiskStats = {
      countsBySeverity: { none: 0, info: 0, low: 1, medium: 0, high: 1, critical: 0 },
@@ -2,6 +2,7 @@ import { inject, Injectable, Signal, computed, signal } from '@angular/core';
import { finalize } from 'rxjs/operators';

import { RISK_API, RiskApi } from './risk.client';
+import { RateLimitError } from './risk-http.client';
import { RiskQueryOptions, RiskResultPage, RiskStats } from './risk.models';

@Injectable({ providedIn: 'root' })
@@ -47,6 +48,13 @@ export class RiskStore {
  }

  private normalizeError(err: unknown): string {
+   if (err instanceof RateLimitError) {
+     if (err.retryAfterMs && Number.isFinite(err.retryAfterMs)) {
+       const seconds = Math.ceil(err.retryAfterMs / 1000);
+       return `Rate limited; retry after ${seconds}s`;
+     }
+     return 'Rate limited; retry shortly';
+   }
    if (err instanceof Error) return err.message;
    return 'Unknown error fetching risk data';
  }
@@ -0,0 +1,75 @@
import { TestBed } from '@angular/core/testing';
import { Subject, of } from 'rxjs';

import { ConsoleRunEventDto, ConsoleStatusDto } from '../api/console-status.models';
import { ConsoleStatusClient } from '../api/console-status.client';
import { ConsoleStatusService } from './console-status.service';
import { ConsoleStatusStore } from './console-status.store';

class FakeConsoleStatusClient {
  public streams: { subject: Subject<ConsoleRunEventDto>; traceId?: string }[] = [];

  getStatus(): any {
    const dto: ConsoleStatusDto = {
      backlog: 0,
      queueLagMs: 0,
      activeRuns: 0,
      pendingRuns: 0,
      healthy: true,
      lastCompletedRunId: null,
      lastCompletedAt: null,
    };
    return of(dto);
  }

  streamRun(_runId: string, _tenantId?: string, traceId?: string) {
    const subject = new Subject<ConsoleRunEventDto>();
    this.streams.push({ subject, traceId });
    return subject.asObservable();
  }
}

describe('ConsoleStatusService', () => {
  let service: ConsoleStatusService;
  let client: FakeConsoleStatusClient;

  beforeEach(() => {
    TestBed.configureTestingModule({
      providers: [
        ConsoleStatusStore,
        ConsoleStatusService,
        { provide: ConsoleStatusClient, useClass: FakeConsoleStatusClient },
      ],
    });

    service = TestBed.inject(ConsoleStatusService);
    client = TestBed.inject(ConsoleStatusClient) as unknown as FakeConsoleStatusClient;
    jasmine.clock().install();
  });

  afterEach(() => {
    jasmine.clock().uninstall();
  });

  it('reconnects when heartbeat is missed', () => {
    const sub = service.subscribeToRun('run-1', { heartbeatMs: 5, maxRetries: 2, traceId: 'trace-heartbeat' });

    expect(client.streams.length).toBe(1);
    jasmine.clock().tick(1006); // heartbeat timeout (5 ms) plus the first 1 s backoff delay
    expect(client.streams.length).toBe(2);

    sub.unsubscribe();
  });

  it('retries after stream errors with backoff', () => {
    const sub = service.subscribeToRun('run-2', { maxRetries: 1, heartbeatMs: 50, traceId: 'trace-error' });

    expect(client.streams.length).toBe(1);
    client.streams[0].subject.error(new Error('boom'));

    jasmine.clock().tick(1001);
    expect(client.streams.length).toBe(2);

    sub.unsubscribe();
  });
});
@@ -5,6 +5,14 @@ import { switchMap } from 'rxjs/operators';
import { ConsoleStatusClient } from '../api/console-status.client';
import { ConsoleRunEventDto, ConsoleStatusDto } from '../api/console-status.models';
import { ConsoleStatusStore } from './console-status.store';
+import { generateTraceId } from '../api/trace.util';

+export interface RunStreamOptions {
+  heartbeatMs?: number;
+  maxRetries?: number;
+  traceId?: string;
+  tenantId?: string;
+}
+
@Injectable({
  providedIn: 'root',
@@ -58,14 +66,65 @@ export class ConsoleStatusService {
  /**
   * Subscribe to run stream events for a given run id.
   */
- subscribeToRun(runId: string): Subscription {
+ subscribeToRun(runId: string, options?: RunStreamOptions): Subscription {
    this.store.clearEvents();
-   return this.client.streamRun(runId).subscribe({
-     next: (evt: ConsoleRunEventDto) => this.store.appendRunEvent(evt),
-     error: (err) => {
-       console.error('console run stream error', err);
-       this.store.setError('Run stream disconnected');
-     },
-   });

+   const traceId = options?.traceId ?? generateTraceId();
+   const heartbeatMs = options?.heartbeatMs ?? 15000;
+   const maxRetries = options?.maxRetries ?? 3;
+   const tenantId = options?.tenantId;
+
+   let retries = 0;
+   let heartbeatHandle: ReturnType<typeof setTimeout> | undefined;
+   let innerSub: Subscription | null = null;
+   let disposed = false;
+
+   const clearHeartbeat = () => {
+     if (heartbeatHandle) {
+       clearTimeout(heartbeatHandle);
+       heartbeatHandle = undefined;
+     }
+   };
+
+   const scheduleHeartbeat = () => {
+     clearHeartbeat();
+     heartbeatHandle = setTimeout(() => {
+       handleError(new Error('heartbeat-timeout'));
+     }, heartbeatMs);
+   };
+
+   const handleError = (err: unknown) => {
+     console.error('console run stream error', err);
+     this.store.setError('Run stream disconnected');
+     if (disposed || retries >= maxRetries) {
+       return;
+     }
+     const delay = Math.min(1000 * Math.pow(2, retries), 30000);
+     retries += 1;
+     setTimeout(connect, delay);
+   };
+
+   const connect = () => {
+     if (disposed) return;
+     innerSub?.unsubscribe();
+     const stream$ = this.client.streamRun(runId, tenantId, traceId);
+     innerSub = stream$.subscribe({
+       next: (evt: ConsoleRunEventDto) => {
+         retries = 0;
+         this.store.appendRunEvent(evt);
+         scheduleHeartbeat();
+       },
+       error: handleError,
+     });
+     scheduleHeartbeat();
+   };
+
+   connect();
+
+   return new Subscription(() => {
+     disposed = true;
+     clearHeartbeat();
+     innerSub?.unsubscribe();
+   });
  }
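Note on the retry schedule: with the defaults above (`maxRetries` 3), `Math.min(1000 * Math.pow(2, retries), 30000)` yields reconnect delays of 1 s, 2 s, and 4 s before the stream gives up; the 30 s ceiling only engages from the sixth attempt onward (retries >= 5) under larger retry budgets.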