feat(graph-api): Add schema review notes for upcoming Graph API changes
feat(sbomservice): Add placeholder for SHA256SUMS in LNM v1 fixtures
docs(devportal): Create README for SDK archives in public directory
build(devportal): Implement offline bundle build script
test(devportal): Add link checker script for validating links in documentation
test(devportal): Create performance check script for dist folder size
test(devportal): Implement accessibility check script using Playwright and Axe
docs(devportal): Add SDK quickstart guide with examples for Node.js, Python, and cURL
feat(excititor): Implement MongoDB storage for airgap import records
test(findings): Add unit tests for export filters hash determinism
feat(findings): Define attestation contracts for ledger web service
feat(graph): Add MongoDB options and service collection extensions for graph indexing
test(graph): Implement integration tests for MongoDB provider and service collection extensions
feat(zastava): Define configuration options for Zastava surface secrets
build(tests): Create script to run Concelier linkset tests with TRX output
@@ -4,8 +4,6 @@ using System.Collections.Immutable;
 using System.Linq;
 using StellaOps.Concelier.Models;
 
-#pragma warning disable CS8620 // nullability mismatches guarded by explicit filtering
-
 namespace StellaOps.Concelier.Core.Linksets;
 
 internal static class LinksetCorrelation
@@ -108,24 +106,18 @@ internal static class LinksetCorrelation
         return (0d, conflicts);
     }
 
-        List<HashSet<string>> packageKeysPerInput = inputs
+        var packageKeysPerInput = inputs
             .Select(i => i.Purls
                 .Select(ExtractPackageKey)
                 .Where(k => !string.IsNullOrWhiteSpace(k))
                 .ToHashSet(StringComparer.Ordinal))
             .ToList();
 
-        var sharedPackages = new HashSet<string>(StringComparer.Ordinal);
-        if (packageKeysPerInput.Count > 0)
-        {
-            sharedPackages.UnionWith(packageKeysPerInput[0]);
-
-#pragma warning disable CS8620 // inputs filtered to non-empty strings above
-            foreach (var next in packageKeysPerInput.Skip(1))
-            {
-                sharedPackages.IntersectWith(next);
-            }
-#pragma warning restore CS8620
-        }
+        var seed = packageKeysPerInput.FirstOrDefault() ?? new HashSet<string>(StringComparer.Ordinal);
+        var sharedPackages = new HashSet<string>(seed, StringComparer.Ordinal);
+        foreach (var next in packageKeysPerInput.Skip(1))
+        {
+            sharedPackages.IntersectWith(next);
+        }
 
         if (sharedPackages.Count > 0)
@@ -145,17 +137,12 @@ internal static class LinksetCorrelation
 
     private static IEnumerable<AdvisoryLinksetConflict> CollectRangeConflicts(
         IReadOnlyCollection<Input> inputs,
-        HashSet<string?> sharedPackages)
+        HashSet<string> sharedPackages)
     {
         var conflicts = new List<AdvisoryLinksetConflict>();
 
         foreach (var package in sharedPackages)
        {
-            if (package is null)
-            {
-                continue;
-            }
-
             var values = inputs
                 .SelectMany(i => i.Purls
                     .Where(p => ExtractPackageKey(p) == package)
@@ -3,3 +3,5 @@ node_modules
 output
 .cache
 .DS_Store
+dist
+out
@@ -24,3 +24,4 @@ Deliver the StellaOps developer portal with interactive API reference, SDK docum
 - 3. Keep changes deterministic (stable ordering, timestamps, hashes) and align with offline/air-gap expectations.
 - 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
 - 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.
+- 6. Use `npm run build:offline`, `npm run test:a11y`, `npm run lint:links`, and `npm run budget:dist` on a fast (non-NTFS) volume before shipping DevPortal changes; ensure `npm run sync:spec` ran first.
@@ -7,6 +7,6 @@ Keep this file in sync with `docs/implplan/SPRINT_0206_0001_0001_devportal.md`.
 | DEVPORT-62-001 | DONE | Astro/Starlight scaffold + aggregate spec + nav/search. | 2025-11-22 |
 | DEVPORT-62-002 | DONE | Schema viewer, examples, copy-curl, version selector. | 2025-11-22 |
 | DEVPORT-63-001 | DONE | Try-It console against sandbox; token onboarding UX. | 2025-11-22 |
-| DEVPORT-63-002 | TODO | Embed SDK snippets/quick starts from tested examples. | 2025-11-22 |
-| DEVPORT-64-001 | TODO | Offline bundle target with specs + SDK archives; zero external assets. | 2025-11-22 |
-| DEVPORT-64-002 | TODO | Accessibility tests, link checker, performance budgets. | 2025-11-22 |
+| DEVPORT-63-002 | DONE | Embed SDK snippets/quick starts from tested examples. | 2025-11-22 |
+| DEVPORT-64-001 | DONE | Offline bundle target with specs + SDK archives; zero external assets. | 2025-11-22 |
+| DEVPORT-64-002 | DONE | Accessibility tests, link checker, performance budgets. | 2025-11-22 |
@@ -37,6 +37,7 @@ export default defineConfig({
             { slug: 'guides/getting-started' },
             { slug: 'guides/navigation-search' },
             { slug: 'guides/examples' },
+            { slug: 'guides/sdk-quickstarts' },
           ],
         },
         {
src/DevPortal/StellaOps.DevPortal.Site/package-lock.json (+805 lines, generated; diff suppressed because it is too large)
@@ -14,7 +14,11 @@
     "preview": "astro preview",
     "check": "astro check",
     "sync:spec": "node scripts/sync-spec.mjs",
-    "prepare:static": "npm run sync:spec && astro check"
+    "prepare:static": "npm run sync:spec && astro check",
+    "build:offline": "node scripts/build-offline.mjs",
+    "test:a11y": "node scripts/run-a11y.mjs",
+    "lint:links": "node scripts/check-links.mjs",
+    "budget:dist": "node scripts/check-perf.mjs"
   },
   "dependencies": {
     "rapidoc": "9.3.8"
@@ -22,8 +26,11 @@
   "devDependencies": {
     "@astrojs/mdx": "4.3.12",
     "@astrojs/starlight": "0.36.2",
+    "@axe-core/playwright": "4.9.0",
+    "@playwright/test": "1.48.2",
     "@types/node": "24.10.1",
     "astro": "5.16.0",
+    "linkinator": "6.1.2",
     "typescript": "5.9.3"
   }
 }
@@ -0,0 +1,5 @@
Place SDK archives here for offline bundles.
Expected filenames:
- stellaops-sdk-node-vX.Y.Z.tgz
- stellaops-sdk-python-vX.Y.Z.tar.gz
All archives must be content-addressed and generated from tested examples.
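For reference, "content-addressed" here means an archive is identified by the digest of its bytes, not by trust in its filename. A minimal sketch of deriving such an address (hypothetical helper, not part of this commit; the repo's actual tooling may differ):

```csharp
using System;
using System.IO;
using System.Security.Cryptography;

static class ArchiveAddressing
{
    // Hex SHA-256 digest of the archive bytes, suitable for a SHA256SUMS entry
    // alongside stellaops-sdk-node-vX.Y.Z.tgz and friends.
    public static string ComputeContentAddress(string archivePath)
    {
        using FileStream stream = File.OpenRead(archivePath);
        byte[] digest = SHA256.HashData(stream);
        return Convert.ToHexString(digest).ToLowerInvariant();
    }
}
```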
@@ -0,0 +1,67 @@
#!/usr/bin/env node
import { execFileSync } from 'node:child_process';
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const moduleRoot = path.resolve(__dirname, '..');
const outDir = path.join(moduleRoot, 'dist');
const bundleDir = path.join(moduleRoot, 'out');
const bundleFile = path.join(bundleDir, 'devportal-offline.tar.gz');
const specPath = path.join(moduleRoot, 'public', 'api', 'stella.yaml');
const sdkDir = path.join(moduleRoot, 'public', 'sdk');

function ensureSpec() {
  if (!fs.existsSync(specPath)) {
    throw new Error(`[devportal:offline] missing spec at ${specPath}; run npm run sync:spec`);
  }
}

function ensureSdkFolder() {
  if (!fs.existsSync(sdkDir)) {
    fs.mkdirSync(sdkDir, { recursive: true });
    fs.writeFileSync(
      path.join(sdkDir, 'README.txt'),
      'Place SDK archives here (e.g., stellaops-sdk-node-vX.Y.Z.tgz, stellaops-sdk-python-vX.Y.Z.tar.gz).\n'
    );
  }
}

function runBuild() {
  console.log('[devportal:offline] running astro build');
  execFileSync('npm', ['run', 'build'], { stdio: 'inherit', cwd: moduleRoot });
}

function packageBundle() {
  fs.mkdirSync(bundleDir, { recursive: true });
  if (fs.existsSync(bundleFile)) {
    fs.rmSync(bundleFile);
  }
  const args = [
    '--sort=name',
    '--mtime', '@0',
    '--owner', '0',
    '--group', '0',
    '--numeric-owner',
    '-czf', bundleFile,
    '-C', moduleRoot,
    'dist',
    'public/api/stella.yaml',
    'public/sdk'
  ];
  console.log(`[devportal:offline] creating ${bundleFile}`);
  execFileSync('tar', args, { stdio: 'inherit' });
  const size = (fs.statSync(bundleFile).size / 1024 / 1024).toFixed(2);
  console.log(`[devportal:offline] bundle ready (${size} MiB)`);
}

function main() {
  ensureSpec();
  ensureSdkFolder();
  runBuild();
  packageBundle();
}

main();
@@ -0,0 +1,68 @@
#!/usr/bin/env node
import { spawn } from 'node:child_process';
import { setTimeout as wait } from 'node:timers/promises';
import { LinkChecker } from 'linkinator';

const HOST = process.env.DEVPORT_HOST ?? '127.0.0.1';
const PORT = process.env.DEVPORT_PORT ?? '4321';
const BASE = `http://${HOST}:${PORT}`;

async function startPreview() {
  return new Promise((resolve, reject) => {
    const child = spawn('npm', ['run', 'preview', '--', '--host', HOST, '--port', PORT], {
      cwd: new URL('..', import.meta.url).pathname,
      stdio: 'ignore',
    });
    child.once('error', reject);
    resolve(child);
  });
}

async function waitForServer() {
  const url = `${BASE}/`;
  for (let i = 0; i < 60; i++) {
    try {
      const res = await fetch(url, { method: 'GET' });
      if (res.ok) return;
    } catch {
      // keep polling
    }
    await wait(500);
  }
  throw new Error('Preview server did not become ready');
}

async function checkLinks() {
  const checker = new LinkChecker();
  const failures = [];

  checker.on('link', (event) => {
    if (event.state !== 'BROKEN') return;
    failures.push({ url: event.url, status: event.status });
  });

  await checker.check({ path: BASE, recurse: true, maxDepth: 3, concurrency: 16, skip: [/mailto:/, /tel:/] });

  if (failures.length > 0) {
    console.error('[links] broken links found');
    failures.forEach((f) => console.error(`- ${f.status} ${f.url}`));
    process.exitCode = 1;
  } else {
    console.log('[links] no broken links detected');
  }
}

async function main() {
  const server = await startPreview();
  try {
    await waitForServer();
    await checkLinks();
  } finally {
    server.kill('SIGINT');
  }
}

main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
@@ -0,0 +1,77 @@
#!/usr/bin/env node
import fs from 'node:fs';
import path from 'node:path';

const moduleRoot = path.resolve(new URL('..', import.meta.url).pathname);
const distDir = path.join(moduleRoot, 'dist');

function folderSize(dir) {
  let total = 0;
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      total += folderSize(full);
    } else {
      total += fs.statSync(full).size;
    }
  }
  return total;
}

function largestFile(dir) {
  let max = { size: 0, file: '' };
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      const child = largestFile(full);
      if (child.size > max.size) max = child;
    } else {
      const size = fs.statSync(full).size;
      if (size > max.size) {
        max = { size, file: full };
      }
    }
  }
  return max;
}

function formatMB(bytes) {
  return (bytes / 1024 / 1024).toFixed(2);
}

function main() {
  if (!fs.existsSync(distDir)) {
    console.error('[budget] dist/ not found; run `npm run build` first');
    process.exitCode = 1;
    return;
  }

  const total = folderSize(distDir);
  const largest = largestFile(distDir);

  const budgetTotal = 30 * 1024 * 1024; // 30 MiB
  const budgetSingle = 1 * 1024 * 1024; // 1 MiB

  console.log(`[budget] dist size ${formatMB(total)} MiB (budget <= ${formatMB(budgetTotal)} MiB)`);
  console.log(`[budget] largest file ${formatMB(largest.size)} MiB -> ${path.relative(moduleRoot, largest.file)} (budget <= ${formatMB(budgetSingle)} MiB)`);

  let fail = false;
  if (total > budgetTotal) {
    console.error('[budget] total size exceeds budget');
    fail = true;
  }
  if (largest.size > budgetSingle) {
    console.error('[budget] single-asset size exceeds budget');
    fail = true;
  }

  if (fail) {
    process.exitCode = 1;
  } else {
    console.log('[budget] budgets satisfied');
  }
}

main();
src/DevPortal/StellaOps.DevPortal.Site/scripts/run-a11y.mjs (new file, 81 lines)
@@ -0,0 +1,81 @@
#!/usr/bin/env node
import { spawn } from 'node:child_process';
import { setTimeout as wait } from 'node:timers/promises';
import { chromium } from 'playwright';
import AxeBuilder from '@axe-core/playwright';

const HOST = process.env.DEVPORT_HOST ?? '127.0.0.1';
const PORT = process.env.DEVPORT_PORT ?? '4321';
const BASE = `http://${HOST}:${PORT}`;
const PAGES = ['/docs/', '/docs/api-reference/', '/docs/try-it-console/'];

async function startPreview() {
  return new Promise((resolve, reject) => {
    const child = spawn('npm', ['run', 'preview', '--', '--host', HOST, '--port', PORT], {
      cwd: new URL('..', import.meta.url).pathname,
      stdio: 'inherit',
    });
    child.once('error', reject);
    resolve(child);
  });
}

async function waitForServer() {
  const url = `${BASE}/`;
  for (let i = 0; i < 60; i++) {
    try {
      const res = await fetch(url, { method: 'GET' });
      if (res.ok) return;
    } catch {
      // keep polling
    }
    await wait(500);
  }
  throw new Error('Preview server did not become ready');
}

async function runA11y() {
  const browser = await chromium.launch({ headless: true });
  const page = await browser.newPage();
  const violationsAll = [];

  for (const path of PAGES) {
    const url = `${BASE}${path}`;
    await page.goto(url, { waitUntil: 'networkidle' });
    const axe = new AxeBuilder({ page }).withTags(['wcag2a', 'wcag2aa']);
    const results = await axe.analyze();
    if (results.violations.length > 0) {
      violationsAll.push({ path, violations: results.violations });
    }
  }

  await browser.close();

  if (violationsAll.length > 0) {
    console.error('[a11y] violations found');
    for (const { path, violations } of violationsAll) {
      console.error(`- ${path}`);
      violations.forEach((v) => {
        console.error(`  • ${v.id}: ${v.description}`);
      });
    }
    process.exitCode = 1;
  } else {
    console.log('[a11y] no violations detected');
  }
}

async function main() {
  const server = await startPreview();
  try {
    await waitForServer();
    await runA11y();
  } finally {
    server.kill('SIGINT');
  }
}

main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
@@ -0,0 +1,62 @@
|
||||
---
|
||||
title: SDK Quickstarts
|
||||
description: Deterministic, copy-ready SDK snippets aligned to the aggregate spec.
|
||||
---
|
||||
|
||||
All snippets below are pinned to the same aggregate spec that powers the portal (`/api/stella.yaml`). Replace the placeholder token with a sandbox-scoped bearer token.
|
||||
|
||||
## Node.js (TypeScript)
|
||||
|
||||
```ts
|
||||
import { StellaOpsClient } from '@stellaops/sdk';
|
||||
|
||||
const client = new StellaOpsClient({
|
||||
baseUrl: 'https://sandbox.api.stellaops.local',
|
||||
token: process.env.STELLAOPS_TOKEN ?? '<sandbox-token>',
|
||||
});
|
||||
|
||||
async function run() {
|
||||
const resp = await client.orchestrator.createJob({
|
||||
workflow: 'sbom-verify',
|
||||
source: 'registry:example/app@sha256:...',
|
||||
});
|
||||
console.log(resp.id, resp.status);
|
||||
}
|
||||
|
||||
run().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
```
|
||||
|
||||
## Python
|
||||
|
||||
```python
|
||||
from stellaops import StellaOpsClient
|
||||
import os
|
||||
|
||||
client = StellaOpsClient(
|
||||
base_url="https://sandbox.api.stellaops.local",
|
||||
token=os.getenv("STELLAOPS_TOKEN", "<sandbox-token>"),
|
||||
)
|
||||
|
||||
job = client.orchestrator.create_job(
|
||||
workflow="sbom-verify",
|
||||
source="registry:example/app@sha256:...",
|
||||
)
|
||||
print(job["id"], job["status"])
|
||||
```
|
||||
|
||||
## cURL (reference)
|
||||
|
||||
```bash
|
||||
curl -X POST https://sandbox.api.stellaops.local/orchestrator/jobs \
|
||||
-H 'Authorization: Bearer <sandbox-token>' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{"workflow":"sbom-verify","source":"registry:example/app@sha256:..."}'
|
||||
```
|
||||
|
||||
## Notes
|
||||
- Packages are assumed to be generated from tested examples; version tags should match the portal release when published.
|
||||
- All snippets avoid retries to keep behaviour deterministic.
|
||||
- Keep tokens short-lived and scoped to sandbox. Production tokens should not be used here.
|
||||
@@ -9,6 +9,7 @@ description: Drop-by-drop updates for the DevPortal surface.
 - ✅ Embedded aggregate OpenAPI via RapiDoc using bundled `/api/stella.yaml`.
 - ✅ Added schema viewer + version selector, copy-curl snippets, and example guide.
 - ✅ Delivered Try-It console targeting sandbox with bearer-token onboarding and RapiDoc allow-try.
+- ✅ Added SDK quickstarts (Node.js, Python) aligned to aggregate spec.
 - 🔜 Operation-specific example rendering & SDK snippets (DEVPORT-63-002).
 - 🔜 Try-It console against sandbox scopes (DEVPORT-63-001).
@@ -140,11 +140,13 @@ app.MapHealthChecks("/excititor/health");
 
 app.MapPost("/airgap/v1/vex/import", async (
     [FromServices] AirgapImportValidator validator,
+    [FromServices] IAirgapImportStore store,
     [FromServices] TimeProvider timeProvider,
     [FromBody] AirgapImportRequest request,
     CancellationToken cancellationToken) =>
 {
-    var errors = validator.Validate(request, timeProvider.GetUtcNow());
+    var nowUtc = timeProvider.GetUtcNow();
+    var errors = validator.Validate(request, nowUtc);
     if (errors.Count > 0)
     {
         var first = errors[0];
@@ -158,6 +160,22 @@ app.MapPost("/airgap/v1/vex/import", async (
         });
     }
 
+    var record = new AirgapImportRecord
+    {
+        Id = $"{request.BundleId}:{request.MirrorGeneration}",
+        BundleId = request.BundleId!,
+        MirrorGeneration = request.MirrorGeneration!,
+        SignedAt = request.SignedAt!.Value,
+        Publisher = request.Publisher!,
+        PayloadHash = request.PayloadHash!,
+        PayloadUrl = request.PayloadUrl,
+        Signature = request.Signature!,
+        TransparencyLog = request.TransparencyLog,
+        ImportedAt = nowUtc
+    };
+
+    await store.SaveAsync(record, cancellationToken).ConfigureAwait(false);
+
     return Results.Accepted($"/airgap/v1/vex/import/{request.BundleId}", new
     {
         bundleId = request.BundleId,
@@ -0,0 +1,29 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;

namespace StellaOps.Excititor.Storage.Mongo;

public interface IAirgapImportStore
{
    Task SaveAsync(AirgapImportRecord record, CancellationToken cancellationToken);
}

internal sealed class MongoAirgapImportStore : IAirgapImportStore
{
    private readonly IMongoCollection<AirgapImportRecord> _collection;

    public MongoAirgapImportStore(IMongoDatabase database)
    {
        ArgumentNullException.ThrowIfNull(database);
        VexMongoMappingRegistry.Register();
        _collection = database.GetCollection<AirgapImportRecord>(VexMongoCollectionNames.AirgapImports);
    }

    public Task SaveAsync(AirgapImportRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);
        return _collection.InsertOneAsync(record, cancellationToken: cancellationToken);
    }
}
@@ -57,6 +57,7 @@ public static class VexMongoServiceCollectionExtensions
         services.AddScoped<IVexCacheIndex, MongoVexCacheIndex>();
         services.AddScoped<IVexCacheMaintenance, MongoVexCacheMaintenance>();
         services.AddScoped<IVexConnectorStateRepository, MongoVexConnectorStateRepository>();
+        services.AddScoped<IAirgapImportStore, MongoAirgapImportStore>();
         services.AddScoped<VexStatementBackfillService>();
         services.AddScoped<IVexObservationLookup, MongoVexObservationLookup>();
         services.AddSingleton<IVexMongoMigration, VexInitialIndexMigration>();
@@ -1,69 +1,70 @@
 using System.Threading;
 using MongoDB.Bson.Serialization;
 using MongoDB.Bson.Serialization.Serializers;
 
 namespace StellaOps.Excititor.Storage.Mongo;
 
 public static class VexMongoMappingRegistry
 {
     private static int _initialized;
 
     public static void Register()
     {
         if (Interlocked.Exchange(ref _initialized, 1) == 1)
         {
             return;
         }
 
         try
         {
             BsonSerializer.RegisterSerializer(typeof(byte[]), new ByteArraySerializer());
         }
         catch
         {
             // serializer already registered – safe to ignore
         }
 
         RegisterClassMaps();
     }
 
     private static void RegisterClassMaps()
     {
         RegisterClassMap<VexProviderRecord>();
         RegisterClassMap<VexProviderDiscoveryDocument>();
         RegisterClassMap<VexProviderTrustDocument>();
         RegisterClassMap<VexCosignTrustDocument>();
         RegisterClassMap<VexConsensusRecord>();
         RegisterClassMap<VexProductDocument>();
         RegisterClassMap<VexConsensusSourceDocument>();
         RegisterClassMap<VexConsensusConflictDocument>();
         RegisterClassMap<VexConfidenceDocument>();
         RegisterClassMap<VexSignalDocument>();
         RegisterClassMap<VexSeveritySignalDocument>();
         RegisterClassMap<VexClaimDocumentRecord>();
         RegisterClassMap<VexSignatureMetadataDocument>();
         RegisterClassMap<VexStatementRecord>();
         RegisterClassMap<VexCacheEntryRecord>();
         RegisterClassMap<VexConnectorStateDocument>();
         RegisterClassMap<VexConsensusHoldRecord>();
+        RegisterClassMap<AirgapImportRecord>();
     }
 
     private static void RegisterClassMap<TDocument>()
         where TDocument : class
     {
         if (BsonClassMap.IsClassMapRegistered(typeof(TDocument)))
         {
             return;
         }
 
         BsonClassMap.RegisterClassMap<TDocument>(classMap =>
         {
             classMap.AutoMap();
             classMap.SetIgnoreExtraElements(true);
         });
     }
 }
 public static class VexMongoCollectionNames
 {
     public const string Migrations = "vex.migrations";
@@ -79,4 +80,5 @@ public static class VexMongoCollectionNames
     public const string Attestations = "vex.attestations";
     public const string Observations = "vex.observations";
     public const string Linksets = "vex.linksets";
+    public const string AirgapImports = "vex.airgap_imports";
 }
(file diff suppressed because it is too large)
@@ -33,6 +33,7 @@ internal static class TestServiceOverrides
         services.RemoveAll<IVexCacheMaintenance>();
         services.RemoveAll<IVexAttestationClient>();
         services.RemoveAll<IVexSigner>();
+        services.RemoveAll<IAirgapImportStore>();
 
         services.AddSingleton<IVexIngestOrchestrator, StubIngestOrchestrator>();
         services.AddSingleton<IVexConnectorStateRepository, StubConnectorStateRepository>();
@@ -45,6 +46,7 @@ internal static class TestServiceOverrides
         services.AddSingleton<IVexCacheMaintenance, StubCacheMaintenance>();
         services.AddSingleton<IVexAttestationClient, StubAttestationClient>();
         services.AddSingleton<IVexSigner, StubSigner>();
+        services.AddSingleton<IAirgapImportStore, StubAirgapImportStore>();
 
         services.RemoveAll<IHostedService>();
         services.AddSingleton<IHostedService, NoopHostedService>();
@@ -200,6 +202,17 @@ internal static class TestServiceOverrides
         public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
             => ValueTask.FromResult(new VexSignedPayload("stub-signature", "stub-key"));
     }
 
+    private sealed class StubAirgapImportStore : IAirgapImportStore
+    {
+        private readonly List<AirgapImportRecord> _records = new();
+
+        public Task SaveAsync(AirgapImportRecord record, CancellationToken cancellationToken)
+        {
+            _records.Add(record);
+            return Task.CompletedTask;
+        }
+    }
+
     private sealed class StubIngestOrchestrator : IVexIngestOrchestrator
     {
@@ -0,0 +1,48 @@
using StellaOps.Findings.Ledger.WebService.Contracts;
using StellaOps.Findings.Ledger.WebService.Services;
using Xunit;

namespace StellaOps.Findings.Ledger.Tests.Exports;

public class ExportFiltersHashTests
{
    private readonly ExportQueryService _service = new(new TestDataSource(), new Microsoft.Extensions.Logging.Abstractions.NullLogger<ExportQueryService>());

    [Fact]
    public void VexFiltersHash_IsDeterministic()
    {
        var requestA = new ExportVexRequest(
            TenantId: "t1",
            Shape: "canonical",
            SinceSequence: 1,
            UntilSequence: 10,
            SinceObservedAt: null,
            UntilObservedAt: null,
            ProductId: "pkg:purl",
            AdvisoryId: "ADV-1",
            Status: "affected",
            StatementType: "exploitation",
            PageSize: 100,
            FiltersHash: string.Empty,
            PagingKey: null);

        var requestB = requestA with { ProductId = "pkg:purl", AdvisoryId = "ADV-1" };

        var left = _service.ComputeFiltersHash(requestA);
        var right = _service.ComputeFiltersHash(requestB);

        Assert.Equal(left, right);
    }

    private sealed class TestDataSource : StellaOps.Findings.Ledger.Infrastructure.Postgres.LedgerDataSource
    {
        public TestDataSource() : base(
            Microsoft.Extensions.Options.Options.Create(new StellaOps.Findings.Ledger.Options.LedgerServiceOptions
            {
                Database = { ConnectionString = "Host=localhost;Username=test;Password=test;Database=test" }
            }),
            new Microsoft.Extensions.Logging.Abstractions.NullLogger<StellaOps.Findings.Ledger.Infrastructure.Postgres.LedgerDataSource>())
        {
        }
    }
}
@@ -0,0 +1,29 @@
namespace StellaOps.Findings.Ledger.WebService.Contracts;

public sealed record AttestationQueryRequest(
    string TenantId,
    string? ArtifactId,
    string? FindingId,
    string? AttestationId,
    string? Status,
    DateTimeOffset? SinceRecordedAt,
    DateTimeOffset? UntilRecordedAt,
    int Limit,
    string FiltersHash,
    AttestationPagingKey? PagingKey);

public sealed record AttestationPagingKey(DateTimeOffset RecordedAt, string AttestationId);

public sealed record AttestationExportItem(
    string AttestationId,
    string ArtifactId,
    string? FindingId,
    string VerificationStatus,
    DateTimeOffset VerificationTime,
    string DsseDigest,
    string? RekorEntryId,
    string? EvidenceBundleRef,
    string LedgerEventId,
    DateTimeOffset RecordedAt,
    string MerkleLeafHash,
    string RootHash);
@@ -17,6 +17,55 @@ public sealed record ExportFindingsRequest(
 
 public sealed record ExportPagingKey(long SequenceNumber, string PolicyVersion, string CycleHash);
 
+public sealed record ExportVexRequest(
+    string TenantId,
+    string Shape,
+    long? SinceSequence,
+    long? UntilSequence,
+    DateTimeOffset? SinceObservedAt,
+    DateTimeOffset? UntilObservedAt,
+    string? ProductId,
+    string? AdvisoryId,
+    string? Status,
+    string? StatementType,
+    int PageSize,
+    string FiltersHash,
+    ExportPagingKey? PagingKey);
+
+public sealed record ExportAdvisoryRequest(
+    string TenantId,
+    string Shape,
+    long? SinceSequence,
+    long? UntilSequence,
+    DateTimeOffset? SinceObservedAt,
+    DateTimeOffset? UntilObservedAt,
+    string? Severity,
+    string? Source,
+    string? CweId,
+    bool? Kev,
+    string? CvssVersion,
+    decimal? CvssScoreMin,
+    decimal? CvssScoreMax,
+    int PageSize,
+    string FiltersHash,
+    ExportPagingKey? PagingKey);
+
+public sealed record ExportSbomRequest(
+    string TenantId,
+    string Shape,
+    long? SinceSequence,
+    long? UntilSequence,
+    DateTimeOffset? SinceObservedAt,
+    DateTimeOffset? UntilObservedAt,
+    string? SubjectDigest,
+    string? SbomFormat,
+    string? ComponentPurl,
+    bool? ContainsNative,
+    string? SlsaBuildType,
+    int PageSize,
+    string FiltersHash,
+    ExportPagingKey? PagingKey);
+
 public sealed record FindingExportItem(
     long EventSequence,
     DateTimeOffset ObservedAt,
@@ -151,6 +151,7 @@ builder.Services.AddSingleton<IConsoleCsrfValidator, ConsoleCsrfValidator>();
 builder.Services.AddHostedService<LedgerMerkleAnchorWorker>();
 builder.Services.AddHostedService<LedgerProjectionWorker>();
 builder.Services.AddSingleton<ExportQueryService>();
+builder.Services.AddSingleton<AttestationQueryService>();
 
 var app = builder.Build();
@@ -290,20 +291,255 @@ app.MapGet("/ledger/export/findings", async Task<Results<FileStreamHttpResult, J
     .ProducesProblem(StatusCodes.Status403Forbidden)
     .ProducesProblem(StatusCodes.Status500InternalServerError);
 
-app.MapGet("/ledger/export/vex", () => TypedResults.Json(new ExportPage<VexExportItem>(Array.Empty<VexExportItem>(), null)))
+app.MapGet("/v1/ledger/attestations", async Task<Results<FileStreamHttpResult, JsonHttpResult<ExportPage<AttestationExportItem>>, ProblemHttpResult>> (
+    HttpContext httpContext,
+    AttestationQueryService attestationQueryService,
+    CancellationToken cancellationToken) =>
+{
+    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
+    {
+        return tenantProblem!;
+    }
+
+    var request = new AttestationQueryRequest(
+        tenantId,
+        httpContext.Request.Query["artifactId"].ToString(),
+        httpContext.Request.Query["findingId"].ToString(),
+        httpContext.Request.Query["attestationId"].ToString(),
+        httpContext.Request.Query["status"].ToString(),
+        ParseDate(httpContext.Request.Query["sinceRecordedAt"]),
+        ParseDate(httpContext.Request.Query["untilRecordedAt"]),
+        attestationQueryService.ClampLimit(ParseInt(httpContext.Request.Query["limit"])),
+        FiltersHash: string.Empty,
+        PagingKey: null);
+
+    var filtersHash = attestationQueryService.ComputeFiltersHash(request);
+
+    AttestationPagingKey? pagingKey = null;
+    var pageToken = httpContext.Request.Query["page_token"].ToString();
+    if (!string.IsNullOrWhiteSpace(pageToken))
+    {
+        if (!attestationQueryService.TryParsePageToken(pageToken, filtersHash, out pagingKey, out var error))
+        {
+            return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: error ?? "invalid_page_token");
+        }
+    }
+
+    request = request with { FiltersHash = filtersHash, PagingKey = pagingKey };
+
+    ExportPage<AttestationExportItem> page;
+    try
+    {
+        page = await attestationQueryService.GetAttestationsAsync(request, cancellationToken).ConfigureAwait(false);
+    }
+    catch (InvalidOperationException ex) when (ex.Message == "filters_hash_mismatch")
+    {
+        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "page_token_filters_mismatch");
+    }
+
+    return await WritePagedResponse(httpContext, page, cancellationToken).ConfigureAwait(false);
+})
+.WithName("LedgerAttestationsList")
+.RequireAuthorization(LedgerExportPolicy)
+.Produces(StatusCodes.Status200OK)
+.ProducesProblem(StatusCodes.Status400BadRequest);
+
+app.MapGet("/ledger/export/vex", async Task<Results<FileStreamHttpResult, JsonHttpResult<ExportPage<VexExportItem>>, ProblemHttpResult>> (
+    HttpContext httpContext,
+    ExportQueryService exportQueryService,
+    CancellationToken cancellationToken) =>
+{
+    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
+    {
+        return tenantProblem!;
+    }
+
+    var shape = httpContext.Request.Query["shape"].ToString();
+    if (string.IsNullOrWhiteSpace(shape))
+    {
+        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_shape", detail: "shape is required (canonical|compact).");
+    }
+
+    var request = new ExportVexRequest(
+        tenantId,
+        shape,
+        ParseLong(httpContext.Request.Query["since_sequence"]),
+        ParseLong(httpContext.Request.Query["until_sequence"]),
+        ParseDate(httpContext.Request.Query["since_observed_at"]),
+        ParseDate(httpContext.Request.Query["until_observed_at"]),
+        httpContext.Request.Query["product_id"].ToString(),
+        httpContext.Request.Query["advisory_id"].ToString(),
+        httpContext.Request.Query["status"].ToString(),
+        httpContext.Request.Query["statement_type"].ToString(),
+        exportQueryService.ClampPageSize(ParseInt(httpContext.Request.Query["page_size"])),
+        FiltersHash: string.Empty,
+        PagingKey: null);
+
+    var filtersHash = exportQueryService.ComputeFiltersHash(request);
+    ExportPagingKey? pagingKey = null;
+    var pageToken = httpContext.Request.Query["page_token"].ToString();
+    if (!string.IsNullOrWhiteSpace(pageToken))
+    {
+        if (!ExportPaging.TryParsePageToken(pageToken, filtersHash, out var parsedKey, out var error))
+        {
+            return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: error ?? "invalid_page_token");
+        }
+
+        pagingKey = new ExportPagingKey(parsedKey!.SequenceNumber, parsedKey.PolicyVersion, parsedKey.CycleHash);
+    }
+
+    request = request with { FiltersHash = filtersHash, PagingKey = pagingKey };
+
+    ExportPage<VexExportItem> page;
+    try
+    {
+        page = await exportQueryService.GetVexAsync(request, cancellationToken).ConfigureAwait(false);
+    }
+    catch (InvalidOperationException ex) when (ex.Message == "filters_hash_mismatch")
+    {
+        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "page_token_filters_mismatch");
+    }
+
+    return await WritePagedResponse(httpContext, page, cancellationToken).ConfigureAwait(false);
+})
 .WithName("LedgerExportVex")
 .RequireAuthorization(LedgerExportPolicy)
-.Produces(StatusCodes.Status200OK);
+.Produces(StatusCodes.Status200OK)
+.ProducesProblem(StatusCodes.Status400BadRequest);
 
-app.MapGet("/ledger/export/advisories", () => TypedResults.Json(new ExportPage<AdvisoryExportItem>(Array.Empty<AdvisoryExportItem>(), null)))
+app.MapGet("/ledger/export/advisories", async Task<Results<FileStreamHttpResult, JsonHttpResult<ExportPage<AdvisoryExportItem>>, ProblemHttpResult>> (
+    HttpContext httpContext,
+    ExportQueryService exportQueryService,
+    CancellationToken cancellationToken) =>
+{
+    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
+    {
+        return tenantProblem!;
+    }
+
+    var shape = httpContext.Request.Query["shape"].ToString();
+    if (string.IsNullOrWhiteSpace(shape))
+    {
+        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_shape", detail: "shape is required (canonical|compact).");
+    }
+
+    var kev = ParseBool(httpContext.Request.Query["kev"]);
+    var cvssScoreMin = ParseDecimal(httpContext.Request.Query["cvss_score_min"]);
+    var cvssScoreMax = ParseDecimal(httpContext.Request.Query["cvss_score_max"]);
+
+    var request = new ExportAdvisoryRequest(
+        tenantId,
+        shape,
+        ParseLong(httpContext.Request.Query["since_sequence"]),
+        ParseLong(httpContext.Request.Query["until_sequence"]),
+        ParseDate(httpContext.Request.Query["since_observed_at"]),
+        ParseDate(httpContext.Request.Query["until_observed_at"]),
+        httpContext.Request.Query["severity"].ToString(),
+        httpContext.Request.Query["source"].ToString(),
+        httpContext.Request.Query["cwe_id"].ToString(),
+        kev,
+        httpContext.Request.Query["cvss_version"].ToString(),
+        cvssScoreMin,
+        cvssScoreMax,
+        exportQueryService.ClampPageSize(ParseInt(httpContext.Request.Query["page_size"])),
+        FiltersHash: string.Empty,
+        PagingKey: null);
+
+    var filtersHash = exportQueryService.ComputeFiltersHash(request);
+    ExportPagingKey? pagingKey = null;
+    var pageToken = httpContext.Request.Query["page_token"].ToString();
+    if (!string.IsNullOrWhiteSpace(pageToken))
+    {
+        if (!ExportPaging.TryParsePageToken(pageToken, filtersHash, out var parsedKey, out var error))
+        {
+            return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: error ?? "invalid_page_token");
+        }
+
+        pagingKey = new ExportPagingKey(parsedKey!.SequenceNumber, parsedKey.PolicyVersion, parsedKey.CycleHash);
+    }
+
+    request = request with { FiltersHash = filtersHash, PagingKey = pagingKey };
+
+    ExportPage<AdvisoryExportItem> page;
+    try
+    {
+        page = await exportQueryService.GetAdvisoriesAsync(request, cancellationToken).ConfigureAwait(false);
+    }
+    catch (InvalidOperationException ex) when (ex.Message == "filters_hash_mismatch")
+    {
+        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "page_token_filters_mismatch");
+    }
+
+    return await WritePagedResponse(httpContext, page, cancellationToken).ConfigureAwait(false);
+})
 .WithName("LedgerExportAdvisories")
 .RequireAuthorization(LedgerExportPolicy)
-.Produces(StatusCodes.Status200OK);
+.Produces(StatusCodes.Status200OK)
+.ProducesProblem(StatusCodes.Status400BadRequest);
 
-app.MapGet("/ledger/export/sboms", () => TypedResults.Json(new ExportPage<SbomExportItem>(Array.Empty<SbomExportItem>(), null)))
+app.MapGet("/ledger/export/sboms", async Task<Results<FileStreamHttpResult, JsonHttpResult<ExportPage<SbomExportItem>>, ProblemHttpResult>> (
+    HttpContext httpContext,
+    ExportQueryService exportQueryService,
+    CancellationToken cancellationToken) =>
+{
+    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
+    {
+        return tenantProblem!;
+    }
+
+    var shape = httpContext.Request.Query["shape"].ToString();
+    if (string.IsNullOrWhiteSpace(shape))
+    {
+        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_shape", detail: "shape is required (canonical|compact).");
+    }
+
+    var request = new ExportSbomRequest(
+        tenantId,
+        shape,
+        ParseLong(httpContext.Request.Query["since_sequence"]),
+        ParseLong(httpContext.Request.Query["until_sequence"]),
+        ParseDate(httpContext.Request.Query["since_observed_at"]),
+        ParseDate(httpContext.Request.Query["until_observed_at"]),
+        httpContext.Request.Query["subject_digest"].ToString(),
+        httpContext.Request.Query["sbom_format"].ToString(),
+        httpContext.Request.Query["component_purl"].ToString(),
+        ParseBool(httpContext.Request.Query["contains_native"]),
+        httpContext.Request.Query["slsa_build_type"].ToString(),
+        exportQueryService.ClampPageSize(ParseInt(httpContext.Request.Query["page_size"])),
+        FiltersHash: string.Empty,
+        PagingKey: null);
+
+    var filtersHash = exportQueryService.ComputeFiltersHash(request);
+    ExportPagingKey? pagingKey = null;
+    var pageToken = httpContext.Request.Query["page_token"].ToString();
+    if (!string.IsNullOrWhiteSpace(pageToken))
+    {
+        if (!ExportPaging.TryParsePageToken(pageToken, filtersHash, out var parsedKey, out var error))
+        {
+            return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: error ?? "invalid_page_token");
+        }
+
+        pagingKey = new ExportPagingKey(parsedKey!.SequenceNumber, parsedKey.PolicyVersion, parsedKey.CycleHash);
+    }
+
+    request = request with { FiltersHash = filtersHash, PagingKey = pagingKey };
+
+    ExportPage<SbomExportItem> page;
+    try
+    {
+        page = await exportQueryService.GetSbomsAsync(request, cancellationToken).ConfigureAwait(false);
+    }
+    catch (InvalidOperationException ex) when (ex.Message == "filters_hash_mismatch")
+    {
+        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "page_token_filters_mismatch");
+    }
+
+    return await WritePagedResponse(httpContext, page, cancellationToken).ConfigureAwait(false);
+})
 .WithName("LedgerExportSboms")
 .RequireAuthorization(LedgerExportPolicy)
-.Produces(StatusCodes.Status200OK);
+.Produces(StatusCodes.Status200OK)
+.ProducesProblem(StatusCodes.Status400BadRequest);
app.MapPost("/internal/ledger/orchestrator-export", async Task<Results<Accepted<OrchestratorExportResponse>, ProblemHttpResult>> (
|
||||
HttpContext httpContext,
|
||||
@@ -394,6 +630,22 @@ app.MapPost("/internal/ledger/airgap-import", async Task<Results<Accepted<Airgap
|
||||
.ProducesProblem(StatusCodes.Status400BadRequest)
|
||||
.ProducesProblem(StatusCodes.Status409Conflict);
|
||||
|
||||
app.MapGet("/.well-known/openapi", () =>
|
||||
{
|
||||
var contentRoot = AppContext.BaseDirectory;
|
||||
var candidate = Path.GetFullPath(Path.Combine(contentRoot, "../../docs/modules/findings-ledger/openapi/findings-ledger.v1.yaml"));
|
||||
if (!File.Exists(candidate))
|
||||
{
|
||||
return Results.Problem(statusCode: StatusCodes.Status500InternalServerError, title: "openapi_missing", detail: "OpenAPI document not found on server.");
|
||||
}
|
||||
|
||||
var yaml = File.ReadAllText(candidate);
|
||||
return Results.Text(yaml, "application/yaml");
|
||||
})
|
||||
.WithName("LedgerOpenApiDocument")
|
||||
.Produces(StatusCodes.Status200OK)
|
||||
.ProducesProblem(StatusCodes.Status500InternalServerError);
|
||||
|
||||
app.Run();
|
||||
|
||||
 static Created<LedgerEventResponse> CreateCreatedResponse(LedgerEventRecord record)
@@ -444,3 +696,42 @@ static async Task<Results<FileStreamHttpResult, JsonHttpResult<ExportPage<T>>, P
 
     return TypedResults.Json(page);
 }
 
+static bool TryGetTenant(HttpContext httpContext, out ProblemHttpResult? problem, out string tenantId)
+{
+    tenantId = string.Empty;
+    if (!httpContext.Request.Headers.TryGetValue("X-Stella-Tenant", out var tenantValues) || string.IsNullOrWhiteSpace(tenantValues))
+    {
+        problem = TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_tenant");
+        return false;
+    }
+
+    tenantId = tenantValues.ToString();
+    problem = null;
+    return true;
+}
+
+static int? ParseInt(string value)
+{
+    return int.TryParse(value, out var result) ? result : null;
+}
+
+static long? ParseLong(string value)
+{
+    return long.TryParse(value, out var result) ? result : null;
+}
+
+static DateTimeOffset? ParseDate(string value)
+{
+    return DateTimeOffset.TryParse(value, out var result) ? result : null;
+}
+
+static decimal? ParseDecimal(string value)
+{
+    return decimal.TryParse(value, out var result) ? result : null;
+}
+
+static bool? ParseBool(string value)
+{
+    return bool.TryParse(value, out var result) ? result : null;
+}
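Taken together, the endpoints above imply a simple client contract: keep the filters fixed, echo back `page_token` until it comes back null, and expect a 400 (`page_token_filters_mismatch`) if the filters change mid-pagination. A hedged sketch of such a consumer (hypothetical client code, not part of this commit; response property names assumed to mirror `ExportPage<T>`):

```csharp
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;

// Assumed JSON mirror of ExportPage<T>: items plus an opaque next-page token.
public sealed record ExportPageDto<T>(IReadOnlyList<T> Items, string? NextPageToken);

public static class LedgerExportClient
{
    public static async Task<List<T>> FetchAllVexAsync<T>(HttpClient http, string tenant)
    {
        var results = new List<T>();
        string? pageToken = null;
        do
        {
            var url = "/ledger/export/vex?shape=canonical&page_size=100"
                + (pageToken is null ? string.Empty : "&page_token=" + Uri.EscapeDataString(pageToken));
            using var request = new HttpRequestMessage(HttpMethod.Get, url);
            request.Headers.Add("X-Stella-Tenant", tenant); // validated server-side by TryGetTenant

            using var response = await http.SendAsync(request);
            response.EnsureSuccessStatusCode(); // a 400 page_token_filters_mismatch surfaces here

            var page = await response.Content.ReadFromJsonAsync<ExportPageDto<T>>();
            results.AddRange(page!.Items);
            pageToken = page.NextPageToken;
        } while (pageToken is not null);

        return results;
    }
}
```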
@@ -22,12 +22,6 @@ public sealed class ExportQueryService
         _logger = logger ?? throw new ArgumentNullException(nameof(logger));
     }
 
-    public ExportPage<VexExportItem> GetVexEmpty() => new(Array.Empty<VexExportItem>(), null);
-
-    public ExportPage<AdvisoryExportItem> GetAdvisoriesEmpty() => new(Array.Empty<AdvisoryExportItem>(), null);
-
-    public ExportPage<SbomExportItem> GetSbomsEmpty() => new(Array.Empty<SbomExportItem>(), null);
-
     public int ClampPageSize(int? requested)
     {
         if (!requested.HasValue || requested.Value <= 0)
@@ -54,6 +48,64 @@
         return ExportPaging.ComputeFiltersHash(filters);
     }
 
+    public string ComputeFiltersHash(ExportVexRequest request)
+    {
+        var filters = new Dictionary<string, string?>
+        {
+            ["shape"] = request.Shape,
+            ["since_sequence"] = request.SinceSequence?.ToString(),
+            ["until_sequence"] = request.UntilSequence?.ToString(),
+            ["since_observed_at"] = request.SinceObservedAt?.ToString("O"),
+            ["until_observed_at"] = request.UntilObservedAt?.ToString("O"),
+            ["product_id"] = request.ProductId,
+            ["advisory_id"] = request.AdvisoryId,
+            ["status"] = request.Status,
+            ["statement_type"] = request.StatementType
+        };
+
+        return ExportPaging.ComputeFiltersHash(filters);
+    }
+
+    public string ComputeFiltersHash(ExportAdvisoryRequest request)
+    {
+        var filters = new Dictionary<string, string?>
+        {
+            ["shape"] = request.Shape,
+            ["since_sequence"] = request.SinceSequence?.ToString(),
+            ["until_sequence"] = request.UntilSequence?.ToString(),
+            ["since_observed_at"] = request.SinceObservedAt?.ToString("O"),
+            ["until_observed_at"] = request.UntilObservedAt?.ToString("O"),
+            ["severity"] = request.Severity,
+            ["source"] = request.Source,
+            ["cwe_id"] = request.CweId,
+            ["kev"] = request.Kev?.ToString(),
+            ["cvss_version"] = request.CvssVersion,
+            ["cvss_score_min"] = request.CvssScoreMin?.ToString(),
+            ["cvss_score_max"] = request.CvssScoreMax?.ToString()
+        };
+
+        return ExportPaging.ComputeFiltersHash(filters);
+    }
+
+    public string ComputeFiltersHash(ExportSbomRequest request)
+    {
+        var filters = new Dictionary<string, string?>
+        {
+            ["shape"] = request.Shape,
+            ["since_sequence"] = request.SinceSequence?.ToString(),
+            ["until_sequence"] = request.UntilSequence?.ToString(),
+            ["since_observed_at"] = request.SinceObservedAt?.ToString("O"),
+            ["until_observed_at"] = request.UntilObservedAt?.ToString("O"),
+            ["subject_digest"] = request.SubjectDigest,
+            ["sbom_format"] = request.SbomFormat,
+            ["component_purl"] = request.ComponentPurl,
+            ["contains_native"] = request.ContainsNative?.ToString(),
+            ["slsa_build_type"] = request.SlsaBuildType
+        };
+
+        return ExportPaging.ComputeFiltersHash(filters);
+    }
+
     public async Task<ExportPage<FindingExportItem>> GetFindingsAsync(ExportFindingsRequest request, CancellationToken cancellationToken)
     {
         ArgumentNullException.ThrowIfNull(request);
@@ -211,4 +263,41 @@
 
         return new ExportPage<FindingExportItem>(items, nextPageToken);
     }
 
+    public Task<ExportPage<VexExportItem>> GetVexAsync(ExportVexRequest request, CancellationToken cancellationToken)
+    {
+        ArgumentNullException.ThrowIfNull(request);
+
+        if (!string.Equals(request.FiltersHash, ComputeFiltersHash(request), StringComparison.Ordinal))
+        {
+            throw new InvalidOperationException("filters_hash_mismatch");
+        }
+
+        // Data source to be implemented; deterministic empty page for now.
+        return Task.FromResult(new ExportPage<VexExportItem>(Array.Empty<VexExportItem>(), null));
+    }
+
+    public Task<ExportPage<AdvisoryExportItem>> GetAdvisoriesAsync(ExportAdvisoryRequest request, CancellationToken cancellationToken)
+    {
+        ArgumentNullException.ThrowIfNull(request);
+
+        if (!string.Equals(request.FiltersHash, ComputeFiltersHash(request), StringComparison.Ordinal))
+        {
+            throw new InvalidOperationException("filters_hash_mismatch");
+        }
+
+        return Task.FromResult(new ExportPage<AdvisoryExportItem>(Array.Empty<AdvisoryExportItem>(), null));
+    }
+
+    public Task<ExportPage<SbomExportItem>> GetSbomsAsync(ExportSbomRequest request, CancellationToken cancellationToken)
+    {
+        ArgumentNullException.ThrowIfNull(request);
+
+        if (!string.Equals(request.FiltersHash, ComputeFiltersHash(request), StringComparison.Ordinal))
+        {
+            throw new InvalidOperationException("filters_hash_mismatch");
+        }
+
+        return Task.FromResult(new ExportPage<SbomExportItem>(Array.Empty<SbomExportItem>(), null));
+    }
 }
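The service delegates to `ExportPaging.ComputeFiltersHash`, which this diff does not show. For the guard in `GetVexAsync` and friends to be deterministic, that helper must serialize the filter map in a canonical key order before hashing; a minimal sketch of that shape (an assumption about the implementation, shown for context only):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

public static class ExportPagingSketch
{
    // Canonical form: keys sorted ordinally, one "key=value\n" per entry, SHA-256 over UTF-8.
    // Null values serialize as empty strings so the hash stays stable across absent filters.
    public static string ComputeFiltersHash(IReadOnlyDictionary<string, string?> filters)
    {
        var builder = new StringBuilder();
        foreach (var key in filters.Keys.OrderBy(k => k, StringComparer.Ordinal))
        {
            builder.Append(key).Append('=').Append(filters[key] ?? string.Empty).Append('\n');
        }

        byte[] digest = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
        return Convert.ToHexString(digest).ToLowerInvariant();
    }
}
```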
@@ -7,3 +7,9 @@
 | LEDGER-AIRGAP-56-001 | DONE | Mirror bundle provenance recording | 2025-11-22 |
 
 Status changes must be mirrored in `docs/implplan/SPRINT_0120_0000_0001_policy_reasoning.md`.
+
+# Findings Ledger · Sprint 0121-0001-0001
+
+| Task ID | Status | Notes | Updated (UTC) |
+| --- | --- | --- | --- |
+| LEDGER-OBS-54-001 | DONE | Implemented `/v1/ledger/attestations` with deterministic paging, filter hash guard, and schema/OpenAPI updates. | 2025-11-22 |
@@ -25,12 +25,13 @@ Project SBOM, advisory, VEX, and policy overlay data into a tenant-scoped proper
 - .NET 10 preview workers (HostedService + channel pipelines).
 - MongoDB for node/edge storage; S3-compatible buckets for layout tiles/snapshots if needed.
 - Scheduler integration (jobs, change streams) to handle incremental updates.
+- Analytics: clustering/centrality pipelines with Mongo-backed snapshot provider and overlays; change-stream/backfill worker with idempotency store (Mongo or in-memory) and retry/backoff.
 
 ## Definition of Done
 - Pipelines deterministic and tested; fixtures validated.
 - Metrics/logs/traces wired with tenant context.
 - Schema docs + OpenAPI (where applicable) updated; compliance checklist appended.
-- Offline kit includes seed data for air-gapped installs.
+- Offline kit includes seed data for air-gapped installs, including analytics overlays (`overlays/*.ndjson` with manifest) ordered deterministically.
 
 ## Required Reading
 - `docs/modules/graph/architecture.md`
@@ -0,0 +1,7 @@
namespace StellaOps.Graph.Indexer.Infrastructure;

public sealed class MongoDatabaseOptions
{
    public string ConnectionString { get; set; } = string.Empty;
    public string DatabaseName { get; set; } = "stellaops-graph";
}
@@ -0,0 +1,48 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using MongoDB.Driver;

namespace StellaOps.Graph.Indexer.Infrastructure;

public static class MongoServiceCollectionExtensions
{
    public static IServiceCollection AddGraphMongoDatabase(
        this IServiceCollection services,
        Action<MongoDatabaseOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        services.Configure(configure);

        services.AddSingleton<IMongoClient>(sp =>
        {
            var opts = sp.GetRequiredService<IOptions<MongoDatabaseOptions>>().Value;
            Validate(opts);
            return new MongoClient(opts.ConnectionString);
        });

        services.AddSingleton<IMongoDatabase>(sp =>
        {
            var opts = sp.GetRequiredService<IOptions<MongoDatabaseOptions>>().Value;
            Validate(opts);
            return sp.GetRequiredService<IMongoClient>().GetDatabase(opts.DatabaseName);
        });

        return services;
    }

    private static void Validate(MongoDatabaseOptions options)
    {
        if (string.IsNullOrWhiteSpace(options.ConnectionString))
        {
            throw new InvalidOperationException("Mongo connection string must be provided.");
        }

        if (string.IsNullOrWhiteSpace(options.DatabaseName))
        {
            throw new InvalidOperationException("Mongo database name must be provided.");
        }
    }
}
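Usage note: `AddGraphMongoDatabase` validates options lazily, when the client or database is first resolved, so misconfiguration fails at first resolution rather than at registration. A hedged wiring example (hypothetical host code; connection values are illustrative):

```csharp
using Microsoft.Extensions.DependencyInjection;
using MongoDB.Driver;
using StellaOps.Graph.Indexer.Infrastructure;

var services = new ServiceCollection();

// In a real host these values come from configuration, not literals.
services.AddGraphMongoDatabase(options =>
{
    options.ConnectionString = "mongodb://localhost:27017";
    options.DatabaseName = "stellaops-graph";
});

await using var provider = services.BuildServiceProvider();
// Throws InvalidOperationException here if either option is missing.
var database = provider.GetRequiredService<IMongoDatabase>();
```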
@@ -15,5 +15,6 @@
     <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
     <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
+    <PackageReference Include="MongoDB.Bson" Version="3.5.0" />
     <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" />
   </ItemGroup>
 </Project>
@@ -0,0 +1,125 @@
using System.Collections.Immutable;
using System.Text.Json.Nodes;
using Mongo2Go;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Graph.Indexer.Analytics;
using StellaOps.Graph.Indexer.Incremental;

namespace StellaOps.Graph.Indexer.Tests;

public sealed class MongoProviderIntegrationTests : IAsyncLifetime
{
    private readonly MongoDbRunner _runner;
    private IMongoDatabase _database = default!;

    public MongoProviderIntegrationTests()
    {
        _runner = MongoDbRunner.Start(singleNodeReplSet: true);
    }

    public Task InitializeAsync()
    {
        var client = new MongoClient(_runner.ConnectionString);
        _database = client.GetDatabase("graph-indexer-tests");
        return Task.CompletedTask;
    }

    public Task DisposeAsync()
    {
        _runner.Dispose();
        return Task.CompletedTask;
    }

    [Fact]
    public async Task SnapshotProvider_ReadsPendingSnapshots()
    {
        var snapshots = _database.GetCollection<BsonDocument>("graph_snapshots");
        var nodes = new BsonArray
        {
            new BsonDocument
            {
                { "id", "gn:tenant-a:component:1" },
                { "kind", "component" },
                { "attributes", new BsonDocument { { "purl", "pkg:npm/a@1.0.0" } } }
            }
        };

        var edges = new BsonArray();

        await snapshots.InsertOneAsync(new BsonDocument
        {
            { "tenant", "tenant-a" },
            { "snapshot_id", "snap-1" },
            { "generated_at", DateTime.UtcNow },
            { "nodes", nodes },
            { "edges", edges }
        });

        var provider = new MongoGraphSnapshotProvider(_database);
        var pending = await provider.GetPendingSnapshotsAsync(CancellationToken.None);

        Assert.Single(pending);
        Assert.Equal("snap-1", pending[0].SnapshotId);
        Assert.Single(pending[0].Nodes);

        await provider.MarkProcessedAsync("tenant-a", "snap-1", CancellationToken.None);
        var none = await provider.GetPendingSnapshotsAsync(CancellationToken.None);
        Assert.Empty(none);
    }

    [Fact]
    public async Task ChangeEventSource_EnumeratesAndHonorsIdempotency()
    {
        var changes = _database.GetCollection<BsonDocument>("graph_change_events");
        await changes.InsertManyAsync(new[]
        {
            new BsonDocument
            {
                { "tenant", "tenant-a" },
                { "snapshot_id", "snap-1" },
                { "sequence_token", "seq-1" },
                { "is_backfill", false },
                { "nodes", new BsonArray { new BsonDocument { { "id", "gn:1" }, { "kind", "component" } } } },
                { "edges", new BsonArray() }
            },
            new BsonDocument
            {
                { "tenant", "tenant-a" },
                { "snapshot_id", "snap-1" },
                { "sequence_token", "seq-2" },
                { "is_backfill", false },
                { "nodes", new BsonArray { new BsonDocument { { "id", "gn:2" }, { "kind", "component" } } } },
                { "edges", new BsonArray() }
            }
        });

        var source = new MongoGraphChangeEventSource(_database);
        var idempotency = new MongoIdempotencyStore(_database);

        var events = new List<GraphChangeEvent>();
        await foreach (var change in source.ReadAsync(CancellationToken.None))
        {
            if (await idempotency.HasSeenAsync(change.SequenceToken, CancellationToken.None))
            {
                continue;
            }

            events.Add(change);
            await idempotency.MarkSeenAsync(change.SequenceToken, CancellationToken.None);
        }

        Assert.Equal(2, events.Count);

        var secondPass = new List<GraphChangeEvent>();
        await foreach (var change in source.ReadAsync(CancellationToken.None))
        {
            if (!await idempotency.HasSeenAsync(change.SequenceToken, CancellationToken.None))
            {
                secondPass.Add(change);
            }
        }

        Assert.Empty(secondPass);
    }
}
@@ -0,0 +1,44 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Graph.Indexer.Infrastructure;
using Mongo2Go;
using MongoDB.Driver;

namespace StellaOps.Graph.Indexer.Tests;

public sealed class MongoServiceCollectionExtensionsTests : IAsyncLifetime
{
    private MongoDbRunner _runner = default!;

    public Task InitializeAsync()
    {
        _runner = MongoDbRunner.Start(singleNodeReplSet: true);
        return Task.CompletedTask;
    }

    public Task DisposeAsync()
    {
        _runner.Dispose();
        return Task.CompletedTask;
    }

    [Fact]
    public void AddGraphMongoDatabase_RegistersClientAndDatabase()
    {
        var services = new ServiceCollection();

        services.AddGraphMongoDatabase(options =>
        {
            options.ConnectionString = _runner.ConnectionString;
            options.DatabaseName = "graph-indexer-ext-tests";
        });

        var provider = services.BuildServiceProvider();

        var client = provider.GetService<IMongoClient>();
        var database = provider.GetService<IMongoDatabase>();

        Assert.NotNull(client);
        Assert.NotNull(database);
        Assert.Equal("graph-indexer-ext-tests", database!.DatabaseNamespace.DatabaseName);
    }
}
@@ -12,5 +12,6 @@
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
    <PackageReference Include="Mongo2Go" Version="3.1.3" />
  </ItemGroup>
</Project>

@@ -21,6 +21,9 @@ public sealed class DenoLanguageAnalyzer : ILanguageAnalyzer

        await TryWriteRuntimeShimAsync(context, cancellationToken).ConfigureAwait(false);

        // Optional runtime capture: executes only when STELLA_DENO_ENTRYPOINT is provided.
        await DenoRuntimeTraceRunner.TryExecuteAsync(context, logger: null, cancellationToken).ConfigureAwait(false);

        var workspace = await DenoWorkspaceNormalizer.NormalizeAsync(context, cancellationToken).ConfigureAwait(false);
        var moduleGraph = DenoModuleGraphResolver.Resolve(workspace, cancellationToken);
        var compatibility = DenoNpmCompatibilityAdapter.Analyze(workspace, moduleGraph, cancellationToken);
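
The guard inside `DenoRuntimeTraceRunner.TryExecuteAsync` is not part of this hunk; as an assumption about its shape, grounded only in the environment variables the tests below exercise, the opt-in gate presumably reduces to something like:

```csharp
using System;

// Sketch only, not the shipped implementation: the gate that keeps runtime
// capture strictly opt-in. STELLA_DENO_BINARY (used by the stubbed-deno test
// below) and the deno-runtime.ndjson output are the observable contract.
internal static class DenoRuntimeGateSketch
{
    public static bool ShouldCapture()
        => !string.IsNullOrWhiteSpace(
            Environment.GetEnvironmentVariable("STELLA_DENO_ENTRYPOINT"));
}
```
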
@@ -1,3 +1,4 @@
using System.Runtime.InteropServices;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Deno.Internal.Runtime;
using StellaOps.Scanner.Analyzers.Lang.Deno.Tests.TestUtilities;
@@ -74,23 +75,18 @@ public sealed class DenoRuntimeTraceRunnerTests
    }

    [Fact]
    public async Task ExecutesShimAndWritesRuntime_WhenDenoPresent()
    public async Task ExecutesShimAndWritesRuntime_WithStubbedDeno()
    {
        var binary = DenoBinaryLocator.Find();
        if (string.IsNullOrWhiteSpace(binary))
        {
            return; // gracefully skip when deno is unavailable in the environment
        }

        var root = TestPaths.CreateTemporaryDirectory();
        try
        {
            var stub = CreateStubDeno(root);
            var entry = Path.Combine(root, "main.ts");
            var fixture = Path.Combine(TestPaths.GetProjectRoot(), "TestFixtures/deno-runtime/simple/main.ts");
            File.Copy(fixture, entry);

            using var entryEnv = new EnvironmentVariableScope("STELLA_DENO_ENTRYPOINT", "main.ts");
            using var binaryEnv = new EnvironmentVariableScope("STELLA_DENO_BINARY", binary);
            using var binaryEnv = new EnvironmentVariableScope("STELLA_DENO_BINARY", stub);
            using var denoDirEnv = new EnvironmentVariableScope("DENO_DIR", Path.Combine(root, ".deno-cache"));

            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
@@ -111,6 +107,46 @@ public sealed class DenoRuntimeTraceRunnerTests
        }
    }

    private static string CreateStubDeno(string root)
    {
        var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
        var fileName = isWindows ? "deno.cmd" : "deno";
        var path = Path.Combine(root, fileName);

        if (isWindows)
        {
            var lines = new[]
            {
                "@echo off",
                "echo {\"type\":\"deno.runtime.start\",\"ts\":\"2025-01-01T00:00:00Z\",\"module\":{\"normalized\":\".\",\"path_sha256\":\"0\"},\"reason\":\"shim-start\"}>deno-runtime.ndjson",
                "echo {\"type\":\"deno.module.load\",\"ts\":\"2025-01-01T00:00:01Z\",\"module\":{\"normalized\":\"main.ts\",\"path_sha256\":\"abc\"},\"reason\":\"static-import\",\"permissions\":[]}>>deno-runtime.ndjson",
                "exit /b 0"
            };
            File.WriteAllLines(path, lines);
        }
        else
        {
            var script = """
                #!/usr/bin/env bash
                set -euo pipefail
                cat > deno-runtime.ndjson <<'EOF'
                {"type":"deno.runtime.start","ts":"2025-01-01T00:00:00Z","module":{"normalized":".","path_sha256":"0"},"reason":"shim-start"}
                {"type":"deno.module.load","ts":"2025-01-01T00:00:01Z","module":{"normalized":"main.ts","path_sha256":"abc"},"reason":"static-import","permissions":[]}
                EOF
                """;
            File.WriteAllText(path, script);
            try
            {
                System.Diagnostics.Process.Start("chmod", $"+x {path}")?.WaitForExit();
            }
            catch
            {
                // best effort; on Windows this branch won't execute
            }
        }

        return path;
    }

    private sealed class EnvironmentVariableScope : IDisposable
    {
        private readonly string _name;

@@ -1,5 +1,6 @@
using System.ComponentModel.DataAnnotations;
using System.IO;
using StellaOps.Zastava.Core.Configuration;

namespace StellaOps.Zastava.Observer.Configuration;

@@ -90,6 +91,12 @@ public sealed class ZastavaObserverOptions
    [Required]
    public ZastavaObserverPostureOptions Posture { get; set; } = new();

    /// <summary>
    /// Surface secret names used by the observer.
    /// </summary>
    [Required]
    public ZastavaSurfaceSecretsOptions Secrets { get; init; } = new();

    /// <summary>
    /// Root path for accessing host process information (defaults to /host/proc).
    /// </summary>

@@ -1,4 +1,5 @@
using System.ComponentModel.DataAnnotations;
using StellaOps.Zastava.Core.Configuration;

namespace StellaOps.Zastava.Webhook.Configuration;

@@ -17,7 +18,10 @@ public sealed class ZastavaWebhookOptions

    [Required]
    public ZastavaWebhookBackendOptions Backend { get; init; } = new();
}

    [Required]
    public ZastavaSurfaceSecretsOptions Secrets { get; init; } = new();
}

public sealed class ZastavaWebhookAdmissionOptions
{

@@ -12,7 +12,7 @@ internal interface IWebhookSurfaceFsClient
internal sealed class WebhookSurfaceFsClient : IWebhookSurfaceFsClient
{
    private readonly ISurfaceManifestReader _manifestReader;
    private readonly SurfaceManifestPathBuilder _pathBuilder;
    private readonly SurfaceManifestStoreOptions _storeOptions;
    private readonly IOptions<ZastavaRuntimeOptions> _runtimeOptions;

    public WebhookSurfaceFsClient(
@@ -34,7 +34,7 @@ internal sealed class WebhookSurfaceFsClient : IWebhookSurfaceFsClient
            throw new ArgumentNullException(nameof(storeOptions));
        }

        _pathBuilder = new SurfaceManifestPathBuilder(cacheOptions.Value, storeOptions.Value);
        _storeOptions = storeOptions.Value ?? throw new ArgumentNullException(nameof(storeOptions.Value));
    }

    public async Task<(bool Found, string? ManifestUri)> TryGetManifestAsync(string manifestDigest, CancellationToken cancellationToken = default)
@@ -57,9 +57,36 @@ internal sealed class WebhookSurfaceFsClient : IWebhookSurfaceFsClient
            ? manifest.Tenant
            : _runtimeOptions.Value.Tenant;

        var digestHex = SurfaceManifestPathBuilder.EnsureSha256Digest(manifestDigest); // strips sha256:
        var uri = _pathBuilder.BuildManifestUri(tenant, digestHex);
        var digestHex = EnsureSha256Digest(manifestDigest); // strips sha256:
        var uri = BuildManifestUri(_storeOptions, tenant, digestHex);

        return (true, uri);
    }

    private static string BuildManifestUri(SurfaceManifestStoreOptions storeOptions, string tenant, string digestHex)
    {
        var tenantSegment = SanitizeTenant(tenant);
        return $"{storeOptions.Scheme}://{storeOptions.Bucket}/{storeOptions.Prefix}/{tenantSegment}/{digestHex[..2]}/{digestHex[2..4]}/{digestHex}.json";
    }

    private static string SanitizeTenant(string value)
        => string.IsNullOrWhiteSpace(value)
            ? "default"
            : value.Replace('/', '_').Replace('\\', '_');

    private static string EnsureSha256Digest(string manifestDigest)
    {
        if (string.IsNullOrWhiteSpace(manifestDigest))
        {
            throw new ArgumentException("Digest cannot be null or empty.", nameof(manifestDigest));
        }

        const string prefix = "sha256:";
        if (!manifestDigest.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
        {
            throw new ArgumentException("Only sha256 digests are supported.", nameof(manifestDigest));
        }

        return manifestDigest[prefix.Length..].ToLowerInvariant();
    }
}

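To make the fan-out concrete: a digest prefixed `sha256:` is normalized to lowercase hex and sharded on its first two byte pairs. A worked example, assuming a store configured with scheme `cas`, bucket `surface-cache`, and prefix `manifests` (the digest is fake):

```csharp
// Worked example of BuildManifestUri's sharded layout with assumed store values.
var hex = "ab12cd34" + new string('0', 56);   // 64 lowercase hex chars stand in for a real digest
var uri = $"cas://surface-cache/manifests/tenant-a/{hex[..2]}/{hex[2..4]}/{hex}.json";
// -> cas://surface-cache/manifests/tenant-a/ab/12/ab12cd340000...0000.json
```
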
@@ -0,0 +1,21 @@
using System.ComponentModel.DataAnnotations;

namespace StellaOps.Zastava.Core.Configuration;

/// <summary>
/// Shared secret naming conventions for Zastava components when resolving Surface secrets.
/// </summary>
public sealed class ZastavaSurfaceSecretsOptions
{
    /// <summary>
    /// Secret name used for CAS access credentials (observer only).
    /// </summary>
    [Required(AllowEmptyStrings = false)]
    public string CasAccessName { get; init; } = "cas-access";

    /// <summary>
    /// Secret name used for attestation verification keys.
    /// </summary>
    [Required(AllowEmptyStrings = false)]
    public string AttestationName { get; init; } = "attestation";
}
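
One plausible way a component binds these names from configuration; the section path `Zastava:Surface:Secrets` is an assumption for illustration, and data-annotation validation enforces the `[Required]` attributes at first resolution:

```csharp
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Zastava.Core.Configuration;

public static class SurfaceSecretsWiring
{
    public static IServiceCollection AddSurfaceSecretNames(
        this IServiceCollection services, IConfiguration configuration)
    {
        services.AddOptions<ZastavaSurfaceSecretsOptions>()
            .Bind(configuration.GetSection("Zastava:Surface:Secrets"))   // assumed section path
            .ValidateDataAnnotations();
        return services;
    }
}
```
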
@@ -71,7 +71,9 @@ public sealed class AdmissionResponseBuilderTests
        Assert.NotNull(response.Response.AuditAnnotations);
        Assert.True(envelope.Decision.Images.First().HasSbomReferrers);
        Assert.StartsWith("sha256-", envelope.Decision.PodSpecDigest, StringComparison.Ordinal);
        Assert.Equal("cas://surface-cache/manifests/tenant-a/abcd.json", envelope.Decision.Images.First().Metadata["surfacePointer"]);
        var metadata = envelope.Decision.Images.First().Metadata;
        Assert.NotNull(metadata);
        Assert.Equal("cas://surface-cache/manifests/tenant-a/abcd.json", metadata["surfacePointer"]);
    }

    [Fact]