CI/CD consolidation
30
devops/tools/__fixtures__/api-compat/new.yaml
Normal file
@@ -0,0 +1,30 @@
openapi: 3.1.0
info:
  title: Demo API
  version: 1.1.0
paths:
  /foo:
    get:
      parameters:
        - in: query
          name: tenant
          required: true
      responses:
        "201":
          description: created
  /bar:
    get:
      responses:
        "200":
          description: ok
  /baz:
    post:
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
      responses:
        "201":
          description: created
29
devops/tools/__fixtures__/api-compat/old.yaml
Normal file
@@ -0,0 +1,29 @@
openapi: 3.1.0
info:
  title: Demo API
  version: 1.0.0
paths:
  /foo:
    get:
      parameters:
        - in: query
          name: filter
          required: false
      responses:
        "200":
          description: ok
          content:
            application/json:
              schema:
                type: string
  /baz:
    post:
      requestBody:
        required: false
        content:
          application/json:
            schema:
              type: object
      responses:
        "201":
          description: created
110
devops/tools/add_blocked_reference.py
Normal file
@@ -0,0 +1,110 @@
#!/usr/bin/env python3
"""
Add BLOCKED dependency tree reference to all sprint files.
"""

import os
import re
from pathlib import Path

DOCS_DIR = Path(__file__).parent.parent / "docs"
IMPLPLAN_DIR = DOCS_DIR / "implplan"
ROUTER_DIR = DOCS_DIR / "router"

# Reference lines with correct relative paths
REFERENCE_LINE_IMPLPLAN = "\n> **BLOCKED Tasks:** Before working on BLOCKED tasks, review [BLOCKED_DEPENDENCY_TREE.md](./BLOCKED_DEPENDENCY_TREE.md) for root blockers and dependencies.\n"
REFERENCE_LINE_ROUTER = "\n> **BLOCKED Tasks:** Before working on BLOCKED tasks, review [../implplan/BLOCKED_DEPENDENCY_TREE.md](../implplan/BLOCKED_DEPENDENCY_TREE.md) for root blockers and dependencies.\n"


def add_reference_to_sprint(filepath: Path, reference_line: str) -> bool:
    """Add BLOCKED reference to a sprint file. Returns True if modified."""
    content = filepath.read_text(encoding="utf-8")

    # Skip if reference already exists
    if "BLOCKED_DEPENDENCY_TREE.md" in content:
        return False

    # Find the best insertion point
    # Priority 1: After "## Documentation Prerequisites" section (before next ##)
    # Priority 2: After "## Dependencies & Concurrency" section
    # Priority 3: After the first line (title)

    lines = content.split("\n")
    insert_index = None

    # Look for Documentation Prerequisites section
    for i, line in enumerate(lines):
        if line.strip().startswith("## Documentation Prerequisites"):
            # Find the next section header or end of list
            for j in range(i + 1, len(lines)):
                if lines[j].strip().startswith("## "):
                    insert_index = j
                    break
                elif lines[j].strip() == "" and j + 1 < len(lines) and lines[j + 1].strip().startswith("## "):
                    insert_index = j + 1
                    break
            if insert_index is None:
                # No next section found, insert after last non-empty line in prerequisites
                for j in range(i + 1, len(lines)):
                    if lines[j].strip().startswith("## "):
                        insert_index = j
                        break
            break

    # Fallback: after Dependencies & Concurrency
    if insert_index is None:
        for i, line in enumerate(lines):
            if line.strip().startswith("## Dependencies"):
                for j in range(i + 1, len(lines)):
                    if lines[j].strip().startswith("## "):
                        insert_index = j
                        break
                break

    # Fallback: after first heading
    if insert_index is None:
        for i, line in enumerate(lines):
            if line.strip().startswith("# "):
                insert_index = i + 2  # After title and blank line
                break

    # Final fallback: beginning of file
    if insert_index is None:
        insert_index = 1

    # Insert the reference
    new_lines = lines[:insert_index] + [reference_line.strip(), ""] + lines[insert_index:]
    new_content = "\n".join(new_lines)

    filepath.write_text(new_content, encoding="utf-8")
    return True


def main():
    modified = 0
    skipped = 0

    # Process implplan directory
    print("Processing docs/implplan...")
    for filepath in sorted(IMPLPLAN_DIR.glob("SPRINT_*.md")):
        if add_reference_to_sprint(filepath, REFERENCE_LINE_IMPLPLAN):
            print(f"Modified: {filepath.name}")
            modified += 1
        else:
            print(f"Skipped: {filepath.name}")
            skipped += 1

    # Process router directory
    print("\nProcessing docs/router...")
    for filepath in sorted(ROUTER_DIR.glob("SPRINT_*.md")):
        if add_reference_to_sprint(filepath, REFERENCE_LINE_ROUTER):
            print(f"Modified: {filepath.name}")
            modified += 1
        else:
            print(f"Skipped: {filepath.name}")
            skipped += 1

    print(f"\nSummary: {modified} files modified, {skipped} files skipped")


if __name__ == "__main__":
    main()
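For reviewers who want to exercise the script above locally: it takes no arguments and resolves its docs directories relative to its own location, so the only assumption in this sketch is that it is run from a checkout with that layout.

```sh
# Idempotent: files that already reference BLOCKED_DEPENDENCY_TREE.md are skipped.
python3 devops/tools/add_blocked_reference.py
```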
32
devops/tools/airgap/verify-offline-kit.sh
Normal file
@@ -0,0 +1,32 @@
#!/usr/bin/env bash
set -euo pipefail

# Minimal verifier sample for AIRGAP-VERIFY-510-014. Adjust paths to your kit.

KIT_ROOT=${1:-./offline}
MANIFEST="$KIT_ROOT/manifest.json"
SIG="$KIT_ROOT/manifest.dsse"

echo "[*] Verifying manifest signature..."
cosign verify-blob --key trust-roots/manifest.pub --signature "$SIG" "$MANIFEST"

echo "[*] Checking chunk hashes..."
# Pass MANIFEST through the environment so the embedded Python can resolve it.
MANIFEST="$MANIFEST" python - <<'PY'
import json, hashlib, sys, os
manifest_path=os.environ.get('MANIFEST') or sys.argv[1]
with open(manifest_path) as f:
    data=json.load(f)
ok=True
for entry in data.get('chunks', []):
    path=os.path.join(os.path.dirname(manifest_path), entry['path'])
    h=hashlib.sha256()
    with open(path,'rb') as fh:
        h.update(fh.read())
    if h.hexdigest()!=entry['sha256']:
        ok=False
        print(f"HASH MISMATCH {entry['path']}")
if not ok:
    sys.exit(4)
PY

echo "[*] Done."
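A usage sketch for the verifier above; the kit path and the trust-roots/manifest.pub key location are whatever your offline kit actually ships, and cosign must be on PATH.

```sh
# Verify the default ./offline kit, or pass an explicit kit root.
devops/tools/airgap/verify-offline-kit.sh ./offline
# Exit code 4 indicates a chunk hash mismatch; cosign verification failures exit non-zero earlier.
```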
129
devops/tools/api-compat/api-changelog.mjs
Normal file
@@ -0,0 +1,129 @@
#!/usr/bin/env node
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';
import yaml from 'yaml';

const ROOT = path.resolve('src/Api/StellaOps.Api.OpenApi');
const BASELINE = path.join(ROOT, 'baselines', 'stella-baseline.yaml');
const CURRENT = path.join(ROOT, 'stella.yaml');
const OUTPUT = path.join(ROOT, 'CHANGELOG.md');
const RELEASE_OUT = path.resolve('src/Sdk/StellaOps.Sdk.Release/out/api-changelog');

function panic(message) {
  console.error(`[api:changelog] ${message}`);
  process.exit(1);
}

function loadSpec(file) {
  if (!fs.existsSync(file)) {
    panic(`Spec not found: ${file}`);
  }
  return yaml.parse(fs.readFileSync(file, 'utf8'));
}

function enumerateOps(spec) {
  const ops = new Map();
  for (const [route, methods] of Object.entries(spec.paths || {})) {
    for (const [method, operation] of Object.entries(methods || {})) {
      const lower = method.toLowerCase();
      if (!['get','post','put','delete','patch','head','options','trace'].includes(lower)) continue;
      const id = `${lower.toUpperCase()} ${route}`;
      ops.set(id, operation || {});
    }
  }
  return ops;
}

function diffSpecs(oldSpec, newSpec) {
  const oldOps = enumerateOps(oldSpec);
  const newOps = enumerateOps(newSpec);
  const additive = [];
  const breaking = [];

  for (const id of newOps.keys()) {
    if (!oldOps.has(id)) {
      additive.push(id);
    }
  }
  for (const id of oldOps.keys()) {
    if (!newOps.has(id)) {
      breaking.push(id);
    }
  }
  return { additive: additive.sort(), breaking: breaking.sort() };
}

function renderMarkdown(diff) {
  const lines = [];
  lines.push('# API Changelog');
  lines.push('');
  const date = new Date().toISOString();
  lines.push(`Generated: ${date}`);
  lines.push('');
  lines.push('## Additive Operations');
  if (diff.additive.length === 0) {
    lines.push('- None');
  } else {
    diff.additive.forEach((op) => lines.push(`- ${op}`));
  }
  lines.push('');
  lines.push('## Breaking Operations');
  if (diff.breaking.length === 0) {
    lines.push('- None');
  } else {
    diff.breaking.forEach((op) => lines.push(`- ${op}`));
  }
  lines.push('');
  return lines.join('\n');
}

function ensureReleaseDir() {
  fs.mkdirSync(RELEASE_OUT, { recursive: true });
}

function sha256(content) {
  return crypto.createHash('sha256').update(content).digest('hex');
}

function signDigest(digest) {
  const key = process.env.API_CHANGELOG_SIGNING_KEY;
  if (!key) {
    return null;
  }

  const hmac = crypto.createHmac('sha256', Buffer.from(key, 'utf8'));
  hmac.update(digest);
  return hmac.digest('hex');
}

function main() {
  if (!fs.existsSync(BASELINE)) {
    console.log('[api:changelog] baseline missing; skipping');
    return;
  }
  const diff = diffSpecs(loadSpec(BASELINE), loadSpec(CURRENT));
  const markdown = renderMarkdown(diff);
  fs.writeFileSync(OUTPUT, markdown, 'utf8');
  console.log(`[api:changelog] wrote changelog to ${OUTPUT}`);

  ensureReleaseDir();
  const releaseChangelog = path.join(RELEASE_OUT, 'CHANGELOG.md');
  fs.writeFileSync(releaseChangelog, markdown, 'utf8');

  const digest = sha256(markdown);
  const digestFile = path.join(RELEASE_OUT, 'CHANGELOG.sha256');
  fs.writeFileSync(digestFile, `${digest} CHANGELOG.md\n`, 'utf8');

  const signature = signDigest(digest);
  if (signature) {
    fs.writeFileSync(path.join(RELEASE_OUT, 'CHANGELOG.sig'), signature, 'utf8');
    console.log('[api:changelog] wrote signature for release artifact');
  } else {
    console.log('[api:changelog] signature skipped (API_CHANGELOG_SIGNING_KEY not set)');
  }

  console.log(`[api:changelog] copied changelog + digest to ${RELEASE_OUT}`);
}

main();
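A minimal invocation sketch; the baseline and current spec paths are hard-coded in the script, and the key file path below is hypothetical, only the API_CHANGELOG_SIGNING_KEY variable name comes from the code.

```sh
# Write CHANGELOG.md next to the spec and copy changelog + digest to the release output directory.
node devops/tools/api-compat/api-changelog.mjs

# Additionally emit CHANGELOG.sig (HMAC-SHA256 over the digest) when a signing key is provided.
API_CHANGELOG_SIGNING_KEY="$(cat /path/to/changelog-hmac.key)" \
  node devops/tools/api-compat/api-changelog.mjs
```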
104
devops/tools/api-compat/api-compat-changelog.mjs
Normal file
@@ -0,0 +1,104 @@
#!/usr/bin/env node
/**
 * Generate a Markdown changelog from two OpenAPI specs using the api-compat-diff tool.
 *
 * Usage:
 *   node scripts/api-compat-changelog.mjs <oldSpec> <newSpec> [--title "Release X"] [--fail-on-breaking]
 *
 * Output is written to stdout.
 */
import { execFileSync } from 'child_process';
import process from 'process';
import path from 'path';

function panic(message) {
  console.error(`[api-compat-changelog] ${message}`);
  process.exit(1);
}

function parseArgs(argv) {
  const args = argv.slice(2);
  if (args.length < 2) {
    panic('Usage: node scripts/api-compat-changelog.mjs <oldSpec> <newSpec> [--title "Release X"] [--fail-on-breaking]');
  }

  const opts = {
    oldSpec: args[0],
    newSpec: args[1],
    title: 'API Compatibility Report',
    failOnBreaking: false,
  };

  for (let i = 2; i < args.length; i += 1) {
    const arg = args[i];
    if (arg === '--title' && args[i + 1]) {
      opts.title = args[i + 1];
      i += 1;
    } else if (arg === '--fail-on-breaking') {
      opts.failOnBreaking = true;
    }
  }

  return opts;
}

function runCompatDiff(oldSpec, newSpec) {
  const output = execFileSync(
    'node',
    ['scripts/api-compat-diff.mjs', oldSpec, newSpec, '--output', 'json'],
    { encoding: 'utf8' }
  );
  return JSON.parse(output);
}

function formatList(items, symbol) {
  if (!items || items.length === 0) {
    return `${symbol} None`;
  }
  return items.map((item) => `${symbol} ${item}`).join('\n');
}

function renderMarkdown(title, diff, oldSpec, newSpec) {
  return [
    `# ${title}`,
    '',
    `- Old spec: \`${path.relative(process.cwd(), oldSpec)}\``,
    `- New spec: \`${path.relative(process.cwd(), newSpec)}\``,
    '',
    '## Summary',
    `- Additive operations: ${diff.additive.operations.length}`,
    `- Breaking operations: ${diff.breaking.operations.length}`,
    `- Additive responses: ${diff.additive.responses.length}`,
    `- Breaking responses: ${diff.breaking.responses.length}`,
    '',
    '## Additive',
    '### Operations',
    formatList(diff.additive.operations, '-'),
    '',
    '### Responses',
    formatList(diff.additive.responses, '-'),
    '',
    '## Breaking',
    '### Operations',
    formatList(diff.breaking.operations, '-'),
    '',
    '### Responses',
    formatList(diff.breaking.responses, '-'),
    '',
  ].join('\n');
}

function main() {
  const opts = parseArgs(process.argv);
  const diff = runCompatDiff(opts.oldSpec, opts.newSpec);
  const markdown = renderMarkdown(opts.title, diff, opts.oldSpec, opts.newSpec);
  console.log(markdown);

  if (opts.failOnBreaking && (diff.breaking.operations.length > 0 || diff.breaking.responses.length > 0)) {
    process.exit(2);
  }
}

if (import.meta.url === `file://${process.argv[1]}`) {
  main();
}
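A usage sketch taken from the header comment; it assumes the scripts/ layout the tool references internally (it shells out to scripts/api-compat-diff.mjs relative to the working directory), which may need adjusting now that the files live under devops/tools/api-compat.

```sh
# Render a Markdown compatibility report to stdout and fail (exit 2) when breaking changes are present.
node scripts/api-compat-changelog.mjs old.yaml new.yaml \
  --title "Release 1.1.0" --fail-on-breaking > api-compat-report.md
```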
26
devops/tools/api-compat/api-compat-changelog.test.mjs
Normal file
@@ -0,0 +1,26 @@
import assert from 'assert';
import { fileURLToPath } from 'url';
import path from 'path';
import { execFileSync } from 'child_process';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const root = path.join(__dirname, '..');

const fixturesDir = path.join(root, 'scripts', '__fixtures__', 'api-compat');
const oldSpec = path.join(fixturesDir, 'old.yaml');
const newSpec = path.join(fixturesDir, 'new.yaml');

const output = execFileSync('node', ['scripts/api-compat-changelog.mjs', oldSpec, newSpec, '--title', 'Test Report'], {
  cwd: root,
  encoding: 'utf8',
});

assert(output.includes('# Test Report'));
assert(output.includes('Additive operations: 1'));
assert(output.includes('Breaking operations: 0'));
assert(output.includes('- get /bar'));
assert(output.includes('- get /foo -> 201'));
assert(output.includes('- get /foo -> 200'));

console.log('api-compat-changelog test passed');
359
devops/tools/api-compat/api-compat-diff.mjs
Normal file
@@ -0,0 +1,359 @@
#!/usr/bin/env node
/**
 * API compatibility diff tool
 * Compares two OpenAPI 3.x specs (YAML or JSON) and reports additive vs breaking changes.
 *
 * Usage:
 *   node scripts/api-compat-diff.mjs <oldSpec> <newSpec> [--output json|text] [--fail-on-breaking]
 *
 * Output (text):
 *   - Added/removed operations
 *   - Added/removed responses
 *   - Parameter additions/removals/requiredness changes
 *   - Response content-type additions/removals
 *   - Request body additions/removals/requiredness and content-type changes
 *
 * Output (json):
 *   {
 *     additive: { operations, responses, parameters, responseContentTypes, requestBodies },
 *     breaking: { operations, responses, parameters, responseContentTypes, requestBodies }
 *   }
 *
 * Exit codes:
 *   0 => success
 *   1 => invalid/missing args or IO/parsing error
 *   2 => breaking changes detected with --fail-on-breaking
 */

import fs from 'fs';
import path from 'path';
import process from 'process';
import yaml from 'yaml';

function panic(message) {
  console.error(`[api-compat-diff] ${message}`);
  process.exit(1);
}

function parseArgs(argv) {
  const args = argv.slice(2);
  const opts = { output: 'text', failOnBreaking: false };

  if (args.length < 2) {
    panic('Usage: node scripts/api-compat-diff.mjs <oldSpec> <newSpec> [--output json|text] [--fail-on-breaking]');
  }

  [opts.oldSpec, opts.newSpec] = args.slice(0, 2);

  for (let i = 2; i < args.length; i += 1) {
    const arg = args[i];
    if (arg === '--output' && args[i + 1]) {
      opts.output = args[i + 1].toLowerCase();
      i += 1;
    } else if (arg === '--fail-on-breaking') {
      opts.failOnBreaking = true;
    }
  }

  if (!['text', 'json'].includes(opts.output)) {
    panic(`Unsupported output mode: ${opts.output}`);
  }

  return opts;
}

function loadSpec(specPath) {
  if (!fs.existsSync(specPath)) {
    panic(`Spec not found: ${specPath}`);
  }

  const raw = fs.readFileSync(specPath, 'utf8');
  const ext = path.extname(specPath).toLowerCase();

  try {
    if (ext === '.json') {
      return JSON.parse(raw);
    }
    return yaml.parse(raw);
  } catch (err) {
    panic(`Failed to parse ${specPath}: ${err.message}`);
  }
}

function normalizeParams(params) {
  const map = new Map();
  if (!Array.isArray(params)) return map;

  for (const param of params) {
    if (!param || typeof param !== 'object') continue;
    if (param.$ref) {
      map.set(`ref:${param.$ref}`, { required: param.required === true, isRef: true });
      continue;
    }
    const name = param.name;
    const loc = param.in;
    if (!name || !loc) continue;
    const key = `${name}:${loc}`;
    map.set(key, { required: param.required === true, isRef: false });
  }

  return map;
}

function describeParam(key, requiredFlag) {
  if (key.startsWith('ref:')) {
    return key.replace(/^ref:/, '');
  }
  const [name, loc] = key.split(':');
  const requiredLabel = requiredFlag ? ' (required)' : '';
  return `${name} in ${loc}${requiredLabel}`;
}

function enumerateOperations(spec) {
  const ops = new Map();
  if (!spec?.paths || typeof spec.paths !== 'object') {
    return ops;
  }

  for (const [pathKey, pathItem] of Object.entries(spec.paths)) {
    if (!pathItem || typeof pathItem !== 'object') {
      continue;
    }

    const pathParams = normalizeParams(pathItem.parameters ?? []);

    for (const method of Object.keys(pathItem)) {
      const lowerMethod = method.toLowerCase();
      if (!['get', 'put', 'post', 'delete', 'patch', 'head', 'options', 'trace'].includes(lowerMethod)) {
        continue;
      }

      const op = pathItem[method];
      if (!op || typeof op !== 'object') {
        continue;
      }

      const opId = `${lowerMethod} ${pathKey}`;

      const opParams = normalizeParams(op.parameters ?? []);
      const parameters = new Map(pathParams);
      for (const [key, val] of opParams.entries()) {
        parameters.set(key, val);
      }

      const responseContentTypes = new Map();
      const responses = new Set();
      const responseEntries = Object.entries(op.responses ?? {});
      for (const [code, resp] of responseEntries) {
        responses.add(code);
        const contentTypes = new Set(Object.keys(resp?.content ?? {}));
        responseContentTypes.set(code, contentTypes);
      }

      const requestBody = op.requestBody
        ? {
            present: true,
            required: op.requestBody.required === true,
            contentTypes: new Set(Object.keys(op.requestBody.content ?? {})),
          }
        : { present: false, required: false, contentTypes: new Set() };

      ops.set(opId, {
        method: lowerMethod,
        path: pathKey,
        responses,
        responseContentTypes,
        parameters,
        requestBody,
      });
    }
  }

  return ops;
}

function diffOperations(oldOps, newOps) {
  const additiveOps = [];
  const breakingOps = [];
  const additiveResponses = [];
  const breakingResponses = [];
  const additiveParams = [];
  const breakingParams = [];
  const additiveResponseContentTypes = [];
  const breakingResponseContentTypes = [];
  const additiveRequestBodies = [];
  const breakingRequestBodies = [];

  // Operations added or removed
  for (const [id] of newOps.entries()) {
    if (!oldOps.has(id)) {
      additiveOps.push(id);
    }
  }

  for (const [id] of oldOps.entries()) {
    if (!newOps.has(id)) {
      breakingOps.push(id);
    }
  }

  // Response- and parameter-level diffs for shared operations
  for (const [id, newOp] of newOps.entries()) {
    if (!oldOps.has(id)) continue;
    const oldOp = oldOps.get(id);

    for (const code of newOp.responses) {
      if (!oldOp.responses.has(code)) {
        additiveResponses.push(`${id} -> ${code}`);
      }
    }

    for (const code of oldOp.responses) {
      if (!newOp.responses.has(code)) {
        breakingResponses.push(`${id} -> ${code}`);
      }
    }

    for (const code of newOp.responses) {
      if (!oldOp.responses.has(code)) continue;
      const oldTypes = oldOp.responseContentTypes.get(code) ?? new Set();
      const newTypes = newOp.responseContentTypes.get(code) ?? new Set();

      for (const ct of newTypes) {
        if (!oldTypes.has(ct)) {
          additiveResponseContentTypes.push(`${id} -> ${code} (${ct})`);
        }
      }
      for (const ct of oldTypes) {
        if (!newTypes.has(ct)) {
          breakingResponseContentTypes.push(`${id} -> ${code} (${ct})`);
        }
      }
    }

    for (const [key, oldParam] of oldOp.parameters.entries()) {
      if (!newOp.parameters.has(key)) {
        breakingParams.push(`${id} -> - parameter ${describeParam(key, oldParam.required)}`);
      }
    }

    for (const [key, newParam] of newOp.parameters.entries()) {
      if (!oldOp.parameters.has(key)) {
        const target = newParam.required ? breakingParams : additiveParams;
        target.push(`${id} -> + parameter ${describeParam(key, newParam.required)}`);
        continue;
      }

      const oldParam = oldOp.parameters.get(key);
      if (oldParam.required !== newParam.required) {
        if (newParam.required) {
          breakingParams.push(`${id} -> parameter ${describeParam(key)} made required`);
        } else {
          additiveParams.push(`${id} -> parameter ${describeParam(key)} made optional`);
        }
      }
    }

    const { requestBody: oldBody } = oldOp;
    const { requestBody: newBody } = newOp;

    if (oldBody.present && !newBody.present) {
      breakingRequestBodies.push(`${id} -> - requestBody`);
    } else if (!oldBody.present && newBody.present) {
      const target = newBody.required ? breakingRequestBodies : additiveRequestBodies;
      const label = newBody.required ? 'required' : 'optional';
      target.push(`${id} -> + requestBody (${label})`);
    } else if (oldBody.present && newBody.present) {
      if (oldBody.required !== newBody.required) {
        if (newBody.required) {
          breakingRequestBodies.push(`${id} -> requestBody made required`);
        } else {
          additiveRequestBodies.push(`${id} -> requestBody made optional`);
        }
      }

      for (const ct of newBody.contentTypes) {
        if (!oldBody.contentTypes.has(ct)) {
          additiveRequestBodies.push(`${id} -> requestBody content-type added: ${ct}`);
        }
      }
      for (const ct of oldBody.contentTypes) {
        if (!newBody.contentTypes.has(ct)) {
          breakingRequestBodies.push(`${id} -> requestBody content-type removed: ${ct}`);
        }
      }
    }
  }

  return {
    additive: {
      operations: additiveOps.sort(),
      responses: additiveResponses.sort(),
      parameters: additiveParams.sort(),
      responseContentTypes: additiveResponseContentTypes.sort(),
      requestBodies: additiveRequestBodies.sort(),
    },
    breaking: {
      operations: breakingOps.sort(),
      responses: breakingResponses.sort(),
      parameters: breakingParams.sort(),
      responseContentTypes: breakingResponseContentTypes.sort(),
      requestBodies: breakingRequestBodies.sort(),
    },
  };
}

function renderText(diff) {
  const lines = [];
  lines.push('Additive:');
  lines.push(` Operations: ${diff.additive.operations.length}`);
  diff.additive.operations.forEach((op) => lines.push(` + ${op}`));
  lines.push(` Responses: ${diff.additive.responses.length}`);
  diff.additive.responses.forEach((resp) => lines.push(` + ${resp}`));
  lines.push(` Parameters: ${diff.additive.parameters.length}`);
  diff.additive.parameters.forEach((param) => lines.push(` + ${param}`));
  lines.push(` Response content-types: ${diff.additive.responseContentTypes.length}`);
  diff.additive.responseContentTypes.forEach((ct) => lines.push(` + ${ct}`));
  lines.push(` Request bodies: ${diff.additive.requestBodies.length}`);
  diff.additive.requestBodies.forEach((rb) => lines.push(` + ${rb}`));
  lines.push('Breaking:');
  lines.push(` Operations: ${diff.breaking.operations.length}`);
  diff.breaking.operations.forEach((op) => lines.push(` - ${op}`));
  lines.push(` Responses: ${diff.breaking.responses.length}`);
  diff.breaking.responses.forEach((resp) => lines.push(` - ${resp}`));
  lines.push(` Parameters: ${diff.breaking.parameters.length}`);
  diff.breaking.parameters.forEach((param) => lines.push(` - ${param}`));
  lines.push(` Response content-types: ${diff.breaking.responseContentTypes.length}`);
  diff.breaking.responseContentTypes.forEach((ct) => lines.push(` - ${ct}`));
  lines.push(` Request bodies: ${diff.breaking.requestBodies.length}`);
  diff.breaking.requestBodies.forEach((rb) => lines.push(` - ${rb}`));
  return lines.join('\n');
}

function main() {
  const opts = parseArgs(process.argv);
  const oldSpec = loadSpec(opts.oldSpec);
  const newSpec = loadSpec(opts.newSpec);

  const diff = diffOperations(enumerateOperations(oldSpec), enumerateOperations(newSpec));

  if (opts.output === 'json') {
    console.log(JSON.stringify(diff, null, 2));
  } else {
    console.log(renderText(diff));
  }

  if (opts.failOnBreaking && (
    diff.breaking.operations.length > 0
    || diff.breaking.responses.length > 0
    || diff.breaking.parameters.length > 0
    || diff.breaking.responseContentTypes.length > 0
    || diff.breaking.requestBodies.length > 0
  )) {
    process.exit(2);
  }
}

if (import.meta.url === `file://${process.argv[1]}`) {
  main();
}
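The documented exit-code contract makes this straightforward to gate in CI; a sketch, again assuming the scripts/ paths from the header comment.

```sh
# Human-readable report
node scripts/api-compat-diff.mjs old.yaml new.yaml

# JSON output for downstream tooling; exit code 2 means breaking changes were detected.
node scripts/api-compat-diff.mjs old.yaml new.yaml --output json --fail-on-breaking \
  || { rc=$?; [ "$rc" -eq 2 ] && echo "breaking API changes detected"; exit "$rc"; }
```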
34
devops/tools/api-compat/api-compat-diff.test.mjs
Normal file
@@ -0,0 +1,34 @@
import assert from 'assert';
import { fileURLToPath } from 'url';
import path from 'path';
import { execFileSync } from 'child_process';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

const fixturesDir = path.join(__dirname, '__fixtures__', 'api-compat');
const oldSpec = path.join(fixturesDir, 'old.yaml');
const newSpec = path.join(fixturesDir, 'new.yaml');

const output = execFileSync('node', ['scripts/api-compat-diff.mjs', oldSpec, newSpec, '--output', 'json'], {
  cwd: path.join(__dirname, '..'),
  encoding: 'utf8',
});

const diff = JSON.parse(output);

assert.deepStrictEqual(diff.additive.operations, ['get /bar']);
assert.deepStrictEqual(diff.breaking.operations, []);
assert.deepStrictEqual(diff.additive.responses, ['get /foo -> 201']);
assert.deepStrictEqual(diff.breaking.responses, ['get /foo -> 200']);
assert.deepStrictEqual(diff.additive.parameters, []);
assert.deepStrictEqual(diff.breaking.parameters, [
  'get /foo -> + parameter tenant in query (required)',
  'get /foo -> - parameter filter in query',
]);
assert.deepStrictEqual(diff.additive.requestBodies, []);
assert.deepStrictEqual(diff.breaking.requestBodies, ['post /baz -> requestBody made required']);
assert.deepStrictEqual(diff.additive.responseContentTypes, []);
assert.deepStrictEqual(diff.breaking.responseContentTypes, []);

console.log('api-compat-diff test passed');
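Both test files are plain Node scripts rather than a runner-based suite; each prints a "... test passed" line on success. Whether the scripts/ relative paths inside them still resolve after the move into devops/tools is an assumption here.

```sh
node devops/tools/api-compat/api-compat-diff.test.mjs
node devops/tools/api-compat/api-compat-changelog.test.mjs
```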
139
devops/tools/api-compat/api-example-coverage.mjs
Normal file
@@ -0,0 +1,139 @@
#!/usr/bin/env node
// Verifies every OpenAPI operation has at least one request example and one response example.
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { parse } from 'yaml';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

const ROOT = path.resolve(__dirname, '..');
const OAS_ROOT = path.join(ROOT, 'src', 'Api', 'StellaOps.Api.OpenApi');

async function main() {
  if (!fs.existsSync(OAS_ROOT)) {
    console.log('[api:examples] no OpenAPI directory found; skipping');
    return;
  }

  const files = await findYamlFiles(OAS_ROOT);
  if (files.length === 0) {
    console.log('[api:examples] no OpenAPI files found; skipping');
    return;
  }

  const failures = [];

  for (const relative of files) {
    const fullPath = path.join(OAS_ROOT, relative);
    const content = fs.readFileSync(fullPath, 'utf8');
    let doc;
    try {
      doc = parse(content, { prettyErrors: true });
    } catch (err) {
      failures.push({ file: relative, path: '', method: '', reason: `YAML parse error: ${err.message}` });
      continue;
    }

    const paths = doc?.paths || {};
    for (const [route, methods] of Object.entries(paths)) {
      for (const [method, operation] of Object.entries(methods || {})) {
        if (!isHttpMethod(method)) continue;

        const hasRequestExample = operation?.requestBody ? hasExample(operation.requestBody) : true;
        const hasResponseExample = Object.values(operation?.responses || {}).some(resp => hasExample(resp));

        if (!hasRequestExample || !hasResponseExample) {
          const missing = [];
          if (!hasRequestExample) missing.push('request');
          if (!hasResponseExample) missing.push('response');
          failures.push({ file: relative, path: route, method, reason: `missing ${missing.join(' & ')} example` });
        }
      }
    }
  }

  if (failures.length > 0) {
    console.error('[api:examples] found operations without examples:');
    for (const f of failures) {
      const locus = [f.file, f.path, f.method.toUpperCase()].filter(Boolean).join(' ');
      console.error(` - ${locus}: ${f.reason}`);
    }
    process.exit(1);
  }

  console.log('[api:examples] all operations contain request and response examples');
}

async function findYamlFiles(root) {
  const results = [];
  async function walk(dir) {
    const entries = await fs.promises.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        await walk(full);
      } else if (entry.isFile() && entry.name.toLowerCase().endsWith('.yaml')) {
        results.push(path.relative(root, full));
      }
    }
  }
  await walk(root);
  return results;
}

function isHttpMethod(method) {
  return ['get', 'post', 'put', 'patch', 'delete', 'options', 'head', 'trace'].includes(method.toLowerCase());
}

function hasExample(node) {
  if (!node) return false;

  // request/response objects may include content -> mediaType -> schema/example/examples
  const content = node.content || {};
  for (const media of Object.values(content)) {
    if (!media) continue;
    if (media.example !== undefined) return true;
    if (media.examples && Object.keys(media.examples).length > 0) return true;
    if (media.schema && hasSchemaExample(media.schema)) return true;
  }

  // response objects may have "examples" directly (non-standard but allowed by spectral rules)
  if (node.examples && Object.keys(node.examples).length > 0) return true;

  return false;
}

function hasSchemaExample(schema) {
  if (!schema) return false;
  if (schema.example !== undefined) return true;
  if (schema.examples && Array.isArray(schema.examples) && schema.examples.length > 0) return true;

  // Recurse into allOf/oneOf/anyOf
  const composites = ['allOf', 'oneOf', 'anyOf'];
  for (const key of composites) {
    if (Array.isArray(schema[key])) {
      if (schema[key].some(hasSchemaExample)) return true;
    }
  }

  // For objects, check properties
  if (schema.type === 'object' && schema.properties) {
    for (const value of Object.values(schema.properties)) {
      if (hasSchemaExample(value)) return true;
    }
  }

  // For arrays, check items
  if (schema.type === 'array' && schema.items) {
    return hasSchemaExample(schema.items);
  }

  return false;
}

main().catch(err => {
  console.error('[api:examples] fatal error', err);
  process.exit(1);
});
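A local-run sketch; the script resolves src/Api/StellaOps.Api.OpenApi relative to its own parent directory, so the only assumption is that layout.

```sh
# Exits 1 and lists every operation that lacks a request or response example.
node devops/tools/api-compat/api-example-coverage.mjs
```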
63
devops/tools/attest/build-attestation-bundle.sh
Normal file
@@ -0,0 +1,63 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-ATTEST-74-002: package attestation outputs into an offline bundle with checksums.

if [[ $# -lt 1 ]]; then
  echo "Usage: $0 <attest-dir> [bundle-out]" >&2
  exit 64
fi

ATTEST_DIR=$1
BUNDLE_OUT=${2:-"out/attest-bundles"}

if [[ ! -d "$ATTEST_DIR" ]]; then
  echo "[attest-bundle] attestation directory not found: $ATTEST_DIR" >&2
  exit 66
fi

mkdir -p "$BUNDLE_OUT"

TS=$(date -u +"%Y%m%dT%H%M%SZ")
BUNDLE_NAME="attestation-bundle-${TS}"
WORK_DIR="${BUNDLE_OUT}/${BUNDLE_NAME}"
mkdir -p "$WORK_DIR"

copy_if_exists() {
  local pattern="$1"
  shopt -s nullglob
  local files=("$ATTEST_DIR"/$pattern)
  if (( ${#files[@]} > 0 )); then
    cp "${files[@]}" "$WORK_DIR/"
  fi
  shopt -u nullglob
}

# Collect common attestation artefacts
copy_if_exists "*.dsse.json"
copy_if_exists "*.in-toto.jsonl"
copy_if_exists "*.sarif"
copy_if_exists "*.intoto.json"
copy_if_exists "*.rekor.txt"
copy_if_exists "*.sig"
copy_if_exists "*.crt"
copy_if_exists "*.pem"
copy_if_exists "*.json"

# Manifest
cat > "${WORK_DIR}/manifest.json" <<EOF
{
  "created_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
  "source_dir": "${ATTEST_DIR}",
  "files": $(ls -1 "${WORK_DIR}" | jq -R . | jq -s .)
}
EOF

# Checksums
(
  cd "$WORK_DIR"
  sha256sum * > SHA256SUMS
)

tar -C "$BUNDLE_OUT" -czf "${WORK_DIR}.tgz" "${BUNDLE_NAME}"
echo "[attest-bundle] bundle created at ${WORK_DIR}.tgz"
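A usage sketch; the attestation directory below is hypothetical, and jq must be available for the manifest step.

```sh
# Bundle artefacts from out/attest into out/attest-bundles/attestation-bundle-<timestamp>.tgz
devops/tools/attest/build-attestation-bundle.sh out/attest

# Or write bundles to an explicit directory
devops/tools/attest/build-attestation-bundle.sh out/attest /tmp/attest-bundles
```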
163
devops/tools/audit-crypto-usage.ps1
Normal file
@@ -0,0 +1,163 @@
#!/usr/bin/env pwsh
<#
.SYNOPSIS
    Audits the codebase for direct usage of System.Security.Cryptography in production code.

.DESCRIPTION
    This script scans the codebase for direct usage of System.Security.Cryptography namespace,
    which should only be used within crypto provider plugin implementations, not in production code.

    All cryptographic operations in production code should use the ICryptoProvider abstraction.

.PARAMETER RootPath
    The root path of the StellaOps repository. Defaults to parent directory of this script.

.PARAMETER FailOnViolations
    If set, the script will exit with code 1 when violations are found. Default: true.

.PARAMETER Verbose
    Enable verbose output showing all scanned files.

.EXAMPLE
    .\audit-crypto-usage.ps1

.EXAMPLE
    .\audit-crypto-usage.ps1 -RootPath "C:\dev\git.stella-ops.org" -FailOnViolations $true
#>

param(
    [Parameter(Mandatory=$false)]
    [string]$RootPath = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)),

    [Parameter(Mandatory=$false)]
    [bool]$FailOnViolations = $true,

    [Parameter(Mandatory=$false)]
    [switch]$Verbose
)

Set-StrictMode -Version Latest
$ErrorActionPreference = "Stop"

# ANSI color codes for output
$Red = "`e[31m"
$Green = "`e[32m"
$Yellow = "`e[33m"
$Blue = "`e[34m"
$Reset = "`e[0m"

Write-Host "${Blue}==================================================================${Reset}"
Write-Host "${Blue}StellaOps Cryptography Usage Audit${Reset}"
Write-Host "${Blue}==================================================================${Reset}"
Write-Host ""

# Patterns to search for
$directCryptoPattern = "using System\.Security\.Cryptography"

# Allowed paths where direct crypto usage is permitted
$allowedPathPatterns = @(
    "\\__Libraries\\StellaOps\.Cryptography\.Plugin\.",  # All crypto plugins
    "\\__Tests\\",                                        # Test code
    "\\third_party\\",                                    # Third-party code
    "\\bench\\",                                          # Benchmark code
    "\\.git\\"                                            # Git metadata
)

# Compile regex for performance
$allowedRegex = ($allowedPathPatterns | ForEach-Object { [regex]::Escape($_) }) -join "|"

Write-Host "Scanning for direct crypto usage in production code..."
Write-Host "Root path: ${Blue}$RootPath${Reset}"
Write-Host ""

# Find all C# files
$allCsFiles = Get-ChildItem -Path $RootPath -Recurse -Filter "*.cs" -ErrorAction SilentlyContinue

$scannedCount = 0
$violations = @()

foreach ($file in $allCsFiles) {
    $scannedCount++

    # Check if file is in an allowed path
    $relativePath = $file.FullName.Substring($RootPath.Length)
    $isAllowed = $relativePath -match $allowedRegex

    if ($isAllowed) {
        if ($Verbose) {
            Write-Host "${Green}[SKIP]${Reset} $relativePath (allowed path)"
        }
        continue
    }

    # Search for direct crypto usage
    $matches = Select-String -Path $file.FullName -Pattern $directCryptoPattern -ErrorAction SilentlyContinue

    if ($matches) {
        foreach ($match in $matches) {
            $violations += [PSCustomObject]@{
                File = $relativePath
                Line = $match.LineNumber
                Content = $match.Line.Trim()
            }
        }
    }

    if ($Verbose) {
        Write-Host "${Green}[OK]${Reset} $relativePath"
    }
}

Write-Host ""
Write-Host "${Blue}==================================================================${Reset}"
Write-Host "Scan Results"
Write-Host "${Blue}==================================================================${Reset}"
Write-Host "Total C# files scanned: ${Blue}$scannedCount${Reset}"
Write-Host "Violations found: $(if ($violations.Count -gt 0) { "${Red}$($violations.Count)${Reset}" } else { "${Green}0${Reset}" })"
Write-Host ""

if ($violations.Count -gt 0) {
    Write-Host "${Red}FAILED: Direct crypto usage detected in production code!${Reset}"
    Write-Host ""
    Write-Host "The following files use ${Yellow}System.Security.Cryptography${Reset} directly:"
    Write-Host "Production code must use ${Green}ICryptoProvider${Reset} abstraction instead."
    Write-Host ""

    $groupedViolations = $violations | Group-Object -Property File

    foreach ($group in $groupedViolations) {
        Write-Host "${Red}✗${Reset} $($group.Name)"
        foreach ($violation in $group.Group) {
            Write-Host " Line $($violation.Line): $($violation.Content)"
        }
        Write-Host ""
    }

    Write-Host "${Yellow}How to fix:${Reset}"
    Write-Host "1. Use ${Green}ICryptoProviderRegistry.ResolveSigner()${Reset} or ${Green}.ResolveHasher()${Reset}"
    Write-Host "2. Inject ${Green}ICryptoProviderRegistry${Reset} via dependency injection"
    Write-Host "3. For offline/airgap scenarios, use ${Green}OfflineVerificationCryptoProvider${Reset}"
    Write-Host ""
    Write-Host "Example refactoring:"
    Write-Host "${Red}// BEFORE (❌ Not allowed)${Reset}"
    Write-Host "using System.Security.Cryptography;"
    Write-Host "var hash = SHA256.HashData(data);"
    Write-Host ""
    Write-Host "${Green}// AFTER (✅ Correct)${Reset}"
    Write-Host "using StellaOps.Cryptography;"
    Write-Host "var hasher = _cryptoRegistry.ResolveHasher(\"SHA-256\");"
    Write-Host "var hash = hasher.Hasher.ComputeHash(data);"
    Write-Host ""

    if ($FailOnViolations) {
        Write-Host "${Red}Audit failed. Exiting with code 1.${Reset}"
        exit 1
    } else {
        Write-Host "${Yellow}Audit failed but FailOnViolations is false. Continuing...${Reset}"
    }
} else {
    Write-Host "${Green}✓ SUCCESS: No direct crypto usage found in production code!${Reset}"
    Write-Host ""
    Write-Host "All cryptographic operations correctly use the ${Green}ICryptoProvider${Reset} abstraction."
    exit 0
}
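A sketch of running the audit from a POSIX shell, for example in CI; pwsh (PowerShell 7+) is assumed to be installed and the repository root is taken from the script's own default.

```sh
# Default behaviour: exit 1 when production code uses System.Security.Cryptography directly.
pwsh -File devops/tools/audit-crypto-usage.ps1

# Report-only mode for local exploration.
pwsh -Command "& ./devops/tools/audit-crypto-usage.ps1 -FailOnViolations \$false"
```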
13
devops/tools/bench/README.md
Normal file
@@ -0,0 +1,13 @@
# Bench scripts

- `determinism-run.sh`: runs BENCH-DETERMINISM-401-057 harness (`src/Bench/StellaOps.Bench/Determinism`), writes artifacts to `out/bench-determinism`, and enforces threshold via `BENCH_DETERMINISM_THRESHOLD` (default 0.95). Defaults to 10 runs per scanner/SBOM pair. Pass `DET_EXTRA_INPUTS` (space-separated globs) to include frozen feeds in `inputs.sha256`; `DET_RUN_EXTRA_ARGS` to forward extra args to the harness; `DET_REACH_GRAPHS`/`DET_REACH_RUNTIME` to hash reachability datasets and emit `dataset.sha256` + `results-reach.*`.
- `offline_run.sh` (in `Determinism/`): air-gapped runner that reads inputs from `offline/inputs`, writes to `offline/results`, defaults runs=10 threshold=0.95, and calls reachability hashing when graph/runtime inputs exist.

Usage:
```sh
BENCH_DETERMINISM_THRESHOLD=0.97 \
DET_EXTRA_INPUTS="offline/feeds/*.tar.gz" \
DET_REACH_GRAPHS="offline/reachability/graphs/*.json" \
DET_REACH_RUNTIME="offline/reachability/runtime/*.ndjson" \
scripts/bench/determinism-run.sh
```
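The README mentions `offline_run.sh` without showing an invocation; a minimal sketch, assuming the harness directory layout it describes.

```sh
cd src/Bench/StellaOps.Bench/Determinism
./offline_run.sh   # reads offline/inputs, writes offline/results, runs=10, threshold=0.95
```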
353
devops/tools/bench/compute-metrics.py
Normal file
@@ -0,0 +1,353 @@
|
||||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# BENCH-AUTO-401-019: Compute FP/MTTD/repro metrics from bench findings
|
||||
|
||||
"""
|
||||
Computes benchmark metrics from src/__Tests/__Benchmarks/findings/** and outputs to results/summary.csv.
|
||||
|
||||
Metrics:
|
||||
- True Positives (TP): Reachable vulns correctly identified
|
||||
- False Positives (FP): Unreachable vulns incorrectly marked affected
|
||||
- True Negatives (TN): Unreachable vulns correctly marked not_affected
|
||||
- False Negatives (FN): Reachable vulns missed
|
||||
- MTTD: Mean Time To Detect (simulated)
|
||||
- Reproducibility: Determinism score
|
||||
|
||||
Usage:
|
||||
python scripts/bench/compute-metrics.py [--findings PATH] [--output PATH] [--baseline PATH]
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import csv
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
@dataclass
|
||||
class FindingMetrics:
|
||||
"""Metrics for a single finding."""
|
||||
finding_id: str
|
||||
cve_id: str
|
||||
variant: str # reachable or unreachable
|
||||
vex_status: str # affected or not_affected
|
||||
is_correct: bool
|
||||
detection_time_ms: float = 0.0
|
||||
evidence_hash: str = ""
|
||||
|
||||
|
||||
@dataclass
|
||||
class AggregateMetrics:
|
||||
"""Aggregated benchmark metrics."""
|
||||
total_findings: int = 0
|
||||
true_positives: int = 0 # reachable + affected
|
||||
false_positives: int = 0 # unreachable + affected
|
||||
true_negatives: int = 0 # unreachable + not_affected
|
||||
false_negatives: int = 0 # reachable + not_affected
|
||||
mttd_ms: float = 0.0
|
||||
reproducibility: float = 1.0
|
||||
findings: list = field(default_factory=list)
|
||||
|
||||
@property
|
||||
def precision(self) -> float:
|
||||
"""TP / (TP + FP)"""
|
||||
denom = self.true_positives + self.false_positives
|
||||
return self.true_positives / denom if denom > 0 else 0.0
|
||||
|
||||
@property
|
||||
def recall(self) -> float:
|
||||
"""TP / (TP + FN)"""
|
||||
denom = self.true_positives + self.false_negatives
|
||||
return self.true_positives / denom if denom > 0 else 0.0
|
||||
|
||||
@property
|
||||
def f1_score(self) -> float:
|
||||
"""2 * (precision * recall) / (precision + recall)"""
|
||||
p, r = self.precision, self.recall
|
||||
return 2 * p * r / (p + r) if (p + r) > 0 else 0.0
|
||||
|
||||
@property
|
||||
def accuracy(self) -> float:
|
||||
"""(TP + TN) / total"""
|
||||
correct = self.true_positives + self.true_negatives
|
||||
return correct / self.total_findings if self.total_findings > 0 else 0.0
|
||||
|
||||
|
||||
def load_finding(finding_dir: Path) -> FindingMetrics | None:
|
||||
"""Load a finding from its directory."""
|
||||
metadata_path = finding_dir / "metadata.json"
|
||||
openvex_path = finding_dir / "decision.openvex.json"
|
||||
|
||||
if not metadata_path.exists() or not openvex_path.exists():
|
||||
return None
|
||||
|
||||
with open(metadata_path, 'r', encoding='utf-8') as f:
|
||||
metadata = json.load(f)
|
||||
|
||||
with open(openvex_path, 'r', encoding='utf-8') as f:
|
||||
openvex = json.load(f)
|
||||
|
||||
# Extract VEX status
|
||||
statements = openvex.get("statements", [])
|
||||
vex_status = statements[0].get("status", "unknown") if statements else "unknown"
|
||||
|
||||
# Determine correctness
|
||||
variant = metadata.get("variant", "unknown")
|
||||
is_correct = (
|
||||
(variant == "reachable" and vex_status == "affected") or
|
||||
(variant == "unreachable" and vex_status == "not_affected")
|
||||
)
|
||||
|
||||
# Extract evidence hash from impact_statement
|
||||
evidence_hash = ""
|
||||
if statements:
|
||||
impact = statements[0].get("impact_statement", "")
|
||||
if "Evidence hash:" in impact:
|
||||
evidence_hash = impact.split("Evidence hash:")[1].strip()
|
||||
|
||||
return FindingMetrics(
|
||||
finding_id=finding_dir.name,
|
||||
cve_id=metadata.get("cve_id", "UNKNOWN"),
|
||||
variant=variant,
|
||||
vex_status=vex_status,
|
||||
is_correct=is_correct,
|
||||
evidence_hash=evidence_hash
|
||||
)
|
||||
|
||||
|
||||
def compute_metrics(findings_dir: Path) -> AggregateMetrics:
|
||||
"""Compute aggregate metrics from all findings."""
|
||||
metrics = AggregateMetrics()
|
||||
|
||||
if not findings_dir.exists():
|
||||
return metrics
|
||||
|
||||
for finding_path in sorted(findings_dir.iterdir()):
|
||||
if not finding_path.is_dir():
|
||||
continue
|
||||
|
||||
finding = load_finding(finding_path)
|
||||
if finding is None:
|
||||
continue
|
||||
|
||||
metrics.total_findings += 1
|
||||
metrics.findings.append(finding)
|
||||
|
||||
# Classify finding
|
||||
if finding.variant == "reachable":
|
||||
if finding.vex_status == "affected":
|
||||
metrics.true_positives += 1
|
||||
else:
|
||||
metrics.false_negatives += 1
|
||||
else: # unreachable
|
||||
if finding.vex_status == "not_affected":
|
||||
metrics.true_negatives += 1
|
||||
else:
|
||||
metrics.false_positives += 1
|
||||
|
||||
# Compute MTTD (simulated - based on evidence availability)
|
||||
# In real scenarios, this would be the time from CVE publication to detection
|
||||
metrics.mttd_ms = sum(f.detection_time_ms for f in metrics.findings)
|
||||
if metrics.total_findings > 0:
|
||||
metrics.mttd_ms /= metrics.total_findings
|
||||
|
||||
return metrics
|
||||
|
||||
|
||||
def load_baseline(baseline_path: Path) -> dict:
|
||||
"""Load baseline scanner results for comparison."""
|
||||
if not baseline_path.exists():
|
||||
return {}
|
||||
|
||||
with open(baseline_path, 'r', encoding='utf-8') as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def compare_with_baseline(metrics: AggregateMetrics, baseline: dict) -> dict:
|
||||
"""Compare StellaOps metrics with baseline scanner."""
|
||||
comparison = {
|
||||
"stellaops": {
|
||||
"precision": metrics.precision,
|
||||
"recall": metrics.recall,
|
||||
"f1_score": metrics.f1_score,
|
||||
"accuracy": metrics.accuracy,
|
||||
"false_positive_rate": metrics.false_positives / metrics.total_findings if metrics.total_findings > 0 else 0
|
||||
}
|
||||
}
|
||||
|
||||
if baseline:
|
||||
# Extract baseline metrics
|
||||
baseline_metrics = baseline.get("metrics", {})
|
||||
comparison["baseline"] = {
|
||||
"precision": baseline_metrics.get("precision", 0),
|
||||
"recall": baseline_metrics.get("recall", 0),
|
||||
"f1_score": baseline_metrics.get("f1_score", 0),
|
||||
"accuracy": baseline_metrics.get("accuracy", 0),
|
||||
"false_positive_rate": baseline_metrics.get("false_positive_rate", 0)
|
||||
}
|
||||
|
||||
# Compute deltas
|
||||
comparison["delta"] = {
|
||||
k: comparison["stellaops"][k] - comparison["baseline"].get(k, 0)
|
||||
for k in comparison["stellaops"]
|
||||
}
|
||||
|
||||
return comparison
|
||||
|
||||
|
||||
def write_summary_csv(metrics: AggregateMetrics, comparison: dict, output_path: Path):
|
||||
"""Write summary.csv with all metrics."""
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with open(output_path, 'w', newline='', encoding='utf-8') as f:
|
||||
writer = csv.writer(f)
|
||||
|
||||
# Header
|
||||
writer.writerow([
|
||||
"timestamp",
|
||||
"total_findings",
|
||||
"true_positives",
|
||||
"false_positives",
|
||||
"true_negatives",
|
||||
"false_negatives",
|
||||
"precision",
|
||||
"recall",
|
||||
"f1_score",
|
||||
"accuracy",
|
||||
"mttd_ms",
|
||||
"reproducibility"
|
||||
])
|
||||
|
||||
# Data row
|
||||
writer.writerow([
|
||||
datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
metrics.total_findings,
|
||||
metrics.true_positives,
|
||||
metrics.false_positives,
|
||||
metrics.true_negatives,
|
||||
metrics.false_negatives,
|
||||
f"{metrics.precision:.4f}",
|
||||
f"{metrics.recall:.4f}",
|
||||
f"{metrics.f1_score:.4f}",
|
||||
f"{metrics.accuracy:.4f}",
|
||||
f"{metrics.mttd_ms:.2f}",
|
||||
f"{metrics.reproducibility:.4f}"
|
||||
])
|
||||
|
||||
|
||||
def write_detailed_json(metrics: AggregateMetrics, comparison: dict, output_path: Path):
|
||||
"""Write detailed JSON report."""
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
report = {
|
||||
"generated_at": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"summary": {
|
||||
"total_findings": metrics.total_findings,
|
||||
"true_positives": metrics.true_positives,
|
||||
"false_positives": metrics.false_positives,
|
||||
"true_negatives": metrics.true_negatives,
|
||||
"false_negatives": metrics.false_negatives,
|
||||
"precision": metrics.precision,
|
||||
"recall": metrics.recall,
|
||||
"f1_score": metrics.f1_score,
|
||||
"accuracy": metrics.accuracy,
|
||||
"mttd_ms": metrics.mttd_ms,
|
||||
"reproducibility": metrics.reproducibility
|
||||
},
|
||||
"comparison": comparison,
|
||||
"findings": [
|
||||
{
|
||||
"finding_id": f.finding_id,
|
||||
"cve_id": f.cve_id,
|
||||
"variant": f.variant,
|
||||
"vex_status": f.vex_status,
|
||||
"is_correct": f.is_correct,
|
||||
"evidence_hash": f.evidence_hash
|
||||
}
|
||||
for f in metrics.findings
|
||||
]
|
||||
}
|
||||
|
||||
with open(output_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(report, f, indent=2, sort_keys=True)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Compute FP/MTTD/repro metrics from bench findings"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--findings",
|
||||
type=Path,
|
||||
default=Path("src/__Tests/__Benchmarks/findings"),
|
||||
help="Path to findings directory"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--output",
|
||||
type=Path,
|
||||
default=Path("src/__Tests/__Benchmarks/results"),
|
||||
help="Output directory for metrics"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--baseline",
|
||||
type=Path,
|
||||
default=None,
|
||||
help="Path to baseline scanner results JSON"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--json",
|
||||
action="store_true",
|
||||
help="Also output detailed JSON report"
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Resolve paths relative to repo root
|
||||
repo_root = Path(__file__).parent.parent.parent
|
||||
findings_path = repo_root / args.findings if not args.findings.is_absolute() else args.findings
|
||||
output_path = repo_root / args.output if not args.output.is_absolute() else args.output
|
||||
|
||||
print(f"Findings path: {findings_path}")
|
||||
print(f"Output path: {output_path}")
|
||||
|
||||
# Compute metrics
|
||||
metrics = compute_metrics(findings_path)
|
||||
|
||||
print(f"\nMetrics Summary:")
|
||||
print(f" Total findings: {metrics.total_findings}")
|
||||
print(f" True Positives: {metrics.true_positives}")
|
||||
print(f" False Positives: {metrics.false_positives}")
|
||||
print(f" True Negatives: {metrics.true_negatives}")
|
||||
print(f" False Negatives: {metrics.false_negatives}")
|
||||
print(f" Precision: {metrics.precision:.4f}")
|
||||
print(f" Recall: {metrics.recall:.4f}")
|
||||
print(f" F1 Score: {metrics.f1_score:.4f}")
|
||||
print(f" Accuracy: {metrics.accuracy:.4f}")
|
||||
|
||||
# Load baseline if provided
|
||||
baseline = {}
|
||||
if args.baseline:
|
||||
baseline_path = repo_root / args.baseline if not args.baseline.is_absolute() else args.baseline
|
||||
baseline = load_baseline(baseline_path)
|
||||
if baseline:
|
||||
print(f"\nBaseline comparison loaded from: {baseline_path}")
|
||||
|
||||
comparison = compare_with_baseline(metrics, baseline)
|
||||
|
||||
# Write outputs
|
||||
write_summary_csv(metrics, comparison, output_path / "summary.csv")
|
||||
print(f"\nWrote summary to: {output_path / 'summary.csv'}")
|
||||
|
||||
if args.json:
|
||||
write_detailed_json(metrics, comparison, output_path / "metrics.json")
|
||||
print(f"Wrote detailed report to: {output_path / 'metrics.json'}")
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
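
For orientation, a minimal sketch (not part of this commit) of how a CI gate could consume the metrics.json written with --json above. The threshold values are illustrative assumptions; the path matches the script's default output directory.

    import json
    import sys
    from pathlib import Path

    METRICS = Path("src/__Tests/__Benchmarks/results/metrics.json")
    FLOORS = {"precision": 0.90, "recall": 0.85}  # illustrative thresholds, not from the commit

    summary = json.loads(METRICS.read_text(encoding="utf-8"))["summary"]
    failures = [
        f"{key}={summary[key]:.4f} < {floor}"
        for key, floor in FLOORS.items()
        if summary[key] < floor
    ]
    if failures:
        print("metrics gate failed: " + ", ".join(failures))
        sys.exit(1)
    print("metrics gate passed")
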
55
devops/tools/bench/determinism-run.sh
Normal file
55
devops/tools/bench/determinism-run.sh
Normal file
@@ -0,0 +1,55 @@
#!/usr/bin/env bash
set -euo pipefail

# BENCH-DETERMINISM-401-057: run determinism harness and collect artifacts

ROOT="$(git rev-parse --show-toplevel)"
HARNESS="${ROOT}/src/Bench/StellaOps.Bench/Determinism"
OUT="${ROOT}/out/bench-determinism"
THRESHOLD="${BENCH_DETERMINISM_THRESHOLD:-0.95}"
mkdir -p "$OUT"

cd "$HARNESS"

python run_bench.py \
  --sboms inputs/sboms/*.json \
  --vex inputs/vex/*.json \
  --config configs/scanners.json \
  --runs 10 \
  --shuffle \
  --output results \
  --manifest-extra "${DET_EXTRA_INPUTS:-}" \
  ${DET_RUN_EXTRA_ARGS:-}

cp -a results "$OUT"/
det_rate=$(python -c "import json;print(json.load(open('results/summary.json'))['determinism_rate'])")
printf "determinism_rate=%s\n" "$det_rate" > "$OUT/summary.txt"
printf "timestamp=%s\n" "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> "$OUT/summary.txt"

awk -v rate="$det_rate" -v th="$THRESHOLD" 'BEGIN {if (rate+0 < th+0) {printf("determinism_rate %s is below threshold %s\n", rate, th); exit 1}}'

if [ -n "${DET_REACH_GRAPHS:-}" ]; then
  echo "[bench-determinism] running reachability dataset hash"
  reach_graphs=${DET_REACH_GRAPHS}
  reach_runtime=${DET_REACH_RUNTIME:-}
  # prefix relative globs with repo root for consistency
  case "$reach_graphs" in
    /*) ;;
    *) reach_graphs="${ROOT}/${reach_graphs}" ;;
  esac
  case "$reach_runtime" in
    /*|"") ;;
    *) reach_runtime="${ROOT}/${reach_runtime}" ;;
  esac
  python run_reachability.py \
    --graphs ${reach_graphs} \
    --runtime ${reach_runtime} \
    --output results
  # copy reachability outputs
  cp results/results-reach.csv "$OUT"/ || true
  cp results/results-reach.json "$OUT"/ || true
  cp results/dataset.sha256 "$OUT"/ || true
fi

tar -C "$OUT" -czf "$OUT/bench-determinism-artifacts.tgz" .
echo "[bench-determinism] artifacts at $OUT"
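
A small companion sketch (not part of this commit) that reads the collected determinism summary back in Python rather than awk; it assumes the out/bench-determinism layout produced by the script above, where `cp -a results "$OUT"/` leaves results/summary.json under the output directory.

    import json
    from pathlib import Path

    out_dir = Path("out/bench-determinism")
    threshold = 0.95  # mirrors the BENCH_DETERMINISM_THRESHOLD default above

    rate = json.loads((out_dir / "results" / "summary.json").read_text(encoding="utf-8"))["determinism_rate"]
    print(f"determinism_rate={rate}")
    assert float(rate) >= threshold, f"determinism_rate {rate} is below threshold {threshold}"
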
417
devops/tools/bench/populate-findings.py
Normal file
417
devops/tools/bench/populate-findings.py
Normal file
@@ -0,0 +1,417 @@
|
||||
#!/usr/bin/env python3
# SPDX-License-Identifier: AGPL-3.0-or-later
# BENCH-AUTO-401-019: Automate population of src/__Tests/__Benchmarks/findings/** from reachbench fixtures

"""
Populates src/__Tests/__Benchmarks/findings/** with per-CVE VEX decision bundles derived from
reachbench fixtures, including reachability evidence, SBOM excerpts, and
DSSE envelope stubs.

Usage:
    python devops/tools/bench/populate-findings.py [--fixtures PATH] [--output PATH] [--dry-run]
"""

import argparse
import hashlib
import json
import os
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
|
||||
|
||||
|
||||
def blake3_hex(data: bytes) -> str:
|
||||
"""Compute BLAKE3-256 hash (fallback to SHA-256 if blake3 not installed)."""
|
||||
try:
|
||||
import blake3
|
||||
return blake3.blake3(data).hexdigest()
|
||||
except ImportError:
|
||||
return "sha256:" + hashlib.sha256(data).hexdigest()
|
||||
|
||||
|
||||
def sha256_hex(data: bytes) -> str:
|
||||
"""Compute SHA-256 hash."""
|
||||
return hashlib.sha256(data).hexdigest()
|
||||
|
||||
|
||||
def canonical_json(obj: Any) -> str:
|
||||
"""Serialize object to canonical JSON (sorted keys, no extra whitespace for hashes)."""
|
||||
return json.dumps(obj, sort_keys=True, separators=(',', ':'))
|
||||
|
||||
|
||||
def canonical_json_pretty(obj: Any) -> str:
|
||||
"""Serialize object to canonical JSON with indentation for readability."""
|
||||
return json.dumps(obj, sort_keys=True, indent=2)
|
||||
|
||||
|
||||
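# Illustrative sketch (not part of the original script): canonical_json gives a
# byte-stable serialization for hashing, while canonical_json_pretty is only for
# human-readable files. Equal objects therefore hash identically regardless of
# key order.
def _example_canonical_hash_stability() -> None:
    a = {"b": 1, "a": [2, 3]}
    b = {"a": [2, 3], "b": 1}
    assert canonical_json(a) == canonical_json(b) == '{"a":[2,3],"b":1}'
    assert sha256_hex(canonical_json(a).encode()) == sha256_hex(canonical_json(b).encode())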
def load_reachbench_index(fixtures_path: Path) -> dict:
|
||||
"""Load the reachbench INDEX.json."""
|
||||
index_path = fixtures_path / "INDEX.json"
|
||||
if not index_path.exists():
|
||||
raise FileNotFoundError(f"Reachbench INDEX not found: {index_path}")
|
||||
with open(index_path, 'r', encoding='utf-8') as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def load_ground_truth(case_path: Path, variant: str) -> dict | None:
|
||||
"""Load ground-truth.json for a variant."""
|
||||
truth_path = case_path / "images" / variant / "reachgraph.truth.json"
|
||||
if not truth_path.exists():
|
||||
return None
|
||||
with open(truth_path, 'r', encoding='utf-8') as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def create_openvex_decision(
|
||||
cve_id: str,
|
||||
purl: str,
|
||||
status: str, # "not_affected" or "affected"
|
||||
justification: str | None,
|
||||
evidence_hash: str,
|
||||
timestamp: str
|
||||
) -> dict:
|
||||
"""Create an OpenVEX decision document."""
|
||||
statement = {
|
||||
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||
"@type": "VEX",
|
||||
"author": "StellaOps Bench Automation",
|
||||
"role": "security_team",
|
||||
"timestamp": timestamp,
|
||||
"version": 1,
|
||||
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||
"statements": [
|
||||
{
|
||||
"vulnerability": {
|
||||
"@id": f"https://nvd.nist.gov/vuln/detail/{cve_id}",
|
||||
"name": cve_id,
|
||||
},
|
||||
"products": [
|
||||
{"@id": purl}
|
||||
],
|
||||
"status": status,
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
if justification and status == "not_affected":
|
||||
statement["statements"][0]["justification"] = justification
|
||||
|
||||
# Add action_statement for affected
|
||||
if status == "affected":
|
||||
statement["statements"][0]["action_statement"] = "Upgrade to patched version or apply mitigation."
|
||||
|
||||
# Add evidence reference
|
||||
statement["statements"][0]["impact_statement"] = f"Evidence hash: {evidence_hash}"
|
||||
|
||||
return statement
|
||||
|
||||
|
||||
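# Illustrative sketch (not part of the original script): the document produced by
# create_openvex_decision above for an unreachable variant. CVE, purl, hash and
# timestamp here are placeholders.
def _example_not_affected_statement() -> dict:
    return create_openvex_decision(
        cve_id="CVE-2021-44228",
        purl="pkg:maven/log4j@1.0.0",
        status="not_affected",
        justification="vulnerable_code_not_present",
        evidence_hash="sha256:<placeholder>",
        timestamp="2025-01-01T00:00:00Z",
    )
    # statements[0] then carries status, justification and the
    # impact_statement "Evidence hash: sha256:<placeholder>".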
def create_dsse_envelope_stub(payload: dict, payload_type: str = "application/vnd.openvex+json") -> dict:
|
||||
"""Create a DSSE envelope stub (signature placeholder for actual signing)."""
|
||||
payload_json = canonical_json(payload)
|
||||
payload_b64 = __import__('base64').b64encode(payload_json.encode()).decode()
|
||||
|
||||
return {
|
||||
"payloadType": payload_type,
|
||||
"payload": payload_b64,
|
||||
"signatures": [
|
||||
{
|
||||
"keyid": "stella.ops/bench-automation@v1",
|
||||
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
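# Illustrative sketch (not part of the original script): the DSSE stub above
# round-trips. The base64 payload decodes back to the canonical JSON of the
# input, and only the signature is a placeholder that must be replaced by real
# signing before the envelope is submitted anywhere.
def _example_dsse_roundtrip() -> None:
    import base64
    payload = {"@type": "VEX", "statements": []}
    envelope = create_dsse_envelope_stub(payload)
    assert envelope["payloadType"] == "application/vnd.openvex+json"
    assert json.loads(base64.b64decode(envelope["payload"])) == payload
    assert envelope["signatures"][0]["sig"].startswith("PLACEHOLDER")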
def create_metadata(
|
||||
cve_id: str,
|
||||
purl: str,
|
||||
variant: str,
|
||||
case_id: str,
|
||||
ground_truth: dict | None,
|
||||
timestamp: str
|
||||
) -> dict:
|
||||
"""Create metadata.json for a finding."""
|
||||
return {
|
||||
"cve_id": cve_id,
|
||||
"purl": purl,
|
||||
"case_id": case_id,
|
||||
"variant": variant,
|
||||
"reachability_status": "reachable" if variant == "reachable" else "unreachable",
|
||||
"ground_truth_schema": ground_truth.get("schema_version") if ground_truth else None,
|
||||
"generated_at": timestamp,
|
||||
"generator": "scripts/bench/populate-findings.py",
|
||||
"generator_version": "1.0.0"
|
||||
}
|
||||
|
||||
|
||||
def extract_cve_id(case_id: str) -> str:
|
||||
"""Extract CVE ID from case_id, or generate a placeholder."""
|
||||
# Common patterns: log4j -> CVE-2021-44228, curl -> CVE-2023-38545, etc.
|
||||
cve_mapping = {
|
||||
"log4j": "CVE-2021-44228",
|
||||
"curl": "CVE-2023-38545",
|
||||
"kestrel": "CVE-2023-44487",
|
||||
"spring": "CVE-2022-22965",
|
||||
"openssl": "CVE-2022-3602",
|
||||
"glibc": "CVE-2015-7547",
|
||||
}
|
||||
|
||||
for key, cve in cve_mapping.items():
|
||||
if key in case_id.lower():
|
||||
return cve
|
||||
|
||||
# Generate placeholder CVE for unknown cases
|
||||
return f"CVE-BENCH-{case_id.upper()[:8]}"
|
||||
|
||||
|
||||
def extract_purl(case_id: str, case_data: dict) -> str:
|
||||
"""Extract or generate a purl from case data."""
|
||||
# Use case metadata if available
|
||||
if "purl" in case_data:
|
||||
return case_data["purl"]
|
||||
|
||||
# Generate based on case_id patterns
|
||||
lang = case_data.get("language", "unknown")
|
||||
version = case_data.get("version", "1.0.0")
|
||||
|
||||
pkg_type_map = {
|
||||
"java": "maven",
|
||||
"dotnet": "nuget",
|
||||
"go": "golang",
|
||||
"python": "pypi",
|
||||
"rust": "cargo",
|
||||
"native": "generic",
|
||||
}
|
||||
|
||||
pkg_type = pkg_type_map.get(lang, "generic")
|
||||
return f"pkg:{pkg_type}/{case_id}@{version}"
|
||||
|
||||
|
||||
def populate_finding(
|
||||
case_id: str,
|
||||
case_data: dict,
|
||||
case_path: Path,
|
||||
output_dir: Path,
|
||||
timestamp: str,
|
||||
dry_run: bool
|
||||
) -> dict:
|
||||
"""Populate a single CVE finding bundle."""
|
||||
cve_id = extract_cve_id(case_id)
|
||||
purl = extract_purl(case_id, case_data)
|
||||
|
||||
results = {
|
||||
"case_id": case_id,
|
||||
"cve_id": cve_id,
|
||||
"variants_processed": [],
|
||||
"errors": []
|
||||
}
|
||||
|
||||
for variant in ["reachable", "unreachable"]:
|
||||
variant_path = case_path / "images" / variant
|
||||
if not variant_path.exists():
|
||||
continue
|
||||
|
||||
ground_truth = load_ground_truth(case_path, variant)
|
||||
|
||||
# Determine VEX status based on variant
|
||||
if variant == "reachable":
|
||||
vex_status = "affected"
|
||||
justification = None
|
||||
else:
|
||||
vex_status = "not_affected"
|
||||
justification = "vulnerable_code_not_present"
|
||||
|
||||
# Create finding directory
|
||||
finding_id = f"{cve_id}-{variant}"
|
||||
finding_dir = output_dir / finding_id
|
||||
evidence_dir = finding_dir / "evidence"
|
||||
|
||||
if not dry_run:
|
||||
finding_dir.mkdir(parents=True, exist_ok=True)
|
||||
evidence_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Create reachability evidence excerpt
|
||||
evidence = {
|
||||
"schema_version": "richgraph-excerpt/v1",
|
||||
"case_id": case_id,
|
||||
"variant": variant,
|
||||
"ground_truth": ground_truth,
|
||||
"paths": ground_truth.get("paths", []) if ground_truth else [],
|
||||
"generated_at": timestamp
|
||||
}
|
||||
evidence_json = canonical_json_pretty(evidence)
|
||||
evidence_hash = blake3_hex(evidence_json.encode())
|
||||
|
||||
if not dry_run:
|
||||
with open(evidence_dir / "reachability.json", 'w', encoding='utf-8') as f:
|
||||
f.write(evidence_json)
|
||||
|
||||
# Create SBOM excerpt
|
||||
sbom = {
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "1.6",
|
||||
"version": 1,
|
||||
"metadata": {
|
||||
"timestamp": timestamp,
|
||||
"tools": [{"vendor": "StellaOps", "name": "bench-auto", "version": "1.0.0"}]
|
||||
},
|
||||
"components": [
|
||||
{
|
||||
"type": "library",
|
||||
"purl": purl,
|
||||
"name": case_id,
|
||||
"version": case_data.get("version", "1.0.0")
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
if not dry_run:
|
||||
with open(evidence_dir / "sbom.cdx.json", 'w', encoding='utf-8') as f:
|
||||
json.dump(sbom, f, indent=2, sort_keys=True)
|
||||
|
||||
# Create OpenVEX decision
|
||||
openvex = create_openvex_decision(
|
||||
cve_id=cve_id,
|
||||
purl=purl,
|
||||
status=vex_status,
|
||||
justification=justification,
|
||||
evidence_hash=evidence_hash,
|
||||
timestamp=timestamp
|
||||
)
|
||||
|
||||
if not dry_run:
|
||||
with open(finding_dir / "decision.openvex.json", 'w', encoding='utf-8') as f:
|
||||
json.dump(openvex, f, indent=2, sort_keys=True)
|
||||
|
||||
# Create DSSE envelope stub
|
||||
dsse = create_dsse_envelope_stub(openvex)
|
||||
|
||||
if not dry_run:
|
||||
with open(finding_dir / "decision.dsse.json", 'w', encoding='utf-8') as f:
|
||||
json.dump(dsse, f, indent=2, sort_keys=True)
|
||||
|
||||
# Create Rekor placeholder
|
||||
if not dry_run:
|
||||
with open(finding_dir / "rekor.txt", 'w', encoding='utf-8') as f:
|
||||
f.write(f"# Rekor log entry placeholder\n")
|
||||
f.write(f"# Submit DSSE envelope to Rekor to populate this file\n")
|
||||
f.write(f"log_index: PENDING\n")
|
||||
f.write(f"uuid: PENDING\n")
|
||||
f.write(f"timestamp: {timestamp}\n")
|
||||
|
||||
# Create metadata
|
||||
metadata = create_metadata(
|
||||
cve_id=cve_id,
|
||||
purl=purl,
|
||||
variant=variant,
|
||||
case_id=case_id,
|
||||
ground_truth=ground_truth,
|
||||
timestamp=timestamp
|
||||
)
|
||||
|
||||
if not dry_run:
|
||||
with open(finding_dir / "metadata.json", 'w', encoding='utf-8') as f:
|
||||
json.dump(metadata, f, indent=2, sort_keys=True)
|
||||
|
||||
results["variants_processed"].append({
|
||||
"variant": variant,
|
||||
"finding_id": finding_id,
|
||||
"vex_status": vex_status,
|
||||
"evidence_hash": evidence_hash
|
||||
})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Populate src/__Tests/__Benchmarks/findings/** from reachbench fixtures"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--fixtures",
|
||||
type=Path,
|
||||
default=Path("src/__Tests/reachability/fixtures/reachbench-2025-expanded"),
|
||||
help="Path to reachbench fixtures directory"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--output",
|
||||
type=Path,
|
||||
default=Path("src/__Tests/__Benchmarks/findings"),
|
||||
help="Output directory for findings"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Print what would be created without writing files"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--limit",
|
||||
type=int,
|
||||
default=0,
|
||||
help="Limit number of cases to process (0 = all)"
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Resolve paths relative to repo root
|
||||
repo_root = Path(__file__).resolve().parents[3]  # devops/tools/bench/ -> repo root
|
||||
fixtures_path = repo_root / args.fixtures if not args.fixtures.is_absolute() else args.fixtures
|
||||
output_path = repo_root / args.output if not args.output.is_absolute() else args.output
|
||||
|
||||
print(f"Fixtures path: {fixtures_path}")
|
||||
print(f"Output path: {output_path}")
|
||||
print(f"Dry run: {args.dry_run}")
|
||||
|
||||
# Load reachbench index
|
||||
try:
|
||||
index = load_reachbench_index(fixtures_path)
|
||||
except FileNotFoundError as e:
|
||||
print(f"Error: {e}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
|
||||
cases = index.get("cases", [])
|
||||
if args.limit > 0:
|
||||
cases = cases[:args.limit]
|
||||
|
||||
print(f"Processing {len(cases)} cases...")
|
||||
|
||||
all_results = []
|
||||
for case in cases:
|
||||
case_id = case["id"]
|
||||
case_path_rel = case.get("path", f"cases/{case_id}")
|
||||
case_path = fixtures_path / case_path_rel
|
||||
|
||||
if not case_path.exists():
|
||||
print(f" Warning: Case path not found: {case_path}")
|
||||
continue
|
||||
|
||||
print(f" Processing: {case_id}")
|
||||
result = populate_finding(
|
||||
case_id=case_id,
|
||||
case_data=case,
|
||||
case_path=case_path,
|
||||
output_dir=output_path,
|
||||
timestamp=timestamp,
|
||||
dry_run=args.dry_run
|
||||
)
|
||||
all_results.append(result)
|
||||
|
||||
for v in result["variants_processed"]:
|
||||
print(f" - {v['finding_id']}: {v['vex_status']}")
|
||||
|
||||
# Summary
|
||||
total_findings = sum(len(r["variants_processed"]) for r in all_results)
|
||||
print(f"\nGenerated {total_findings} findings from {len(all_results)} cases")
|
||||
|
||||
if args.dry_run:
|
||||
print("(dry-run mode - no files written)")
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
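
A minimal sketch (not part of this commit) for verifying that a populated findings directory has the layout the generator above writes; the file names match the open() calls in populate_finding.

    from pathlib import Path

    EXPECTED = {"decision.openvex.json", "decision.dsse.json", "rekor.txt", "metadata.json"}
    findings_root = Path("src/__Tests/__Benchmarks/findings")

    for finding_dir in sorted(p for p in findings_root.iterdir() if p.is_dir()):
        present = {f.name for f in finding_dir.iterdir() if f.is_file()}
        missing = EXPECTED - present
        has_evidence = (finding_dir / "evidence" / "reachability.json").is_file() and \
                       (finding_dir / "evidence" / "sbom.cdx.json").is_file()
        status = "ok" if not missing and has_evidence else f"incomplete (missing: {sorted(missing)})"
        print(f"{finding_dir.name}: {status}")
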
107
devops/tools/bench/run-baseline.sh
Normal file
107
devops/tools/bench/run-baseline.sh
Normal file
@@ -0,0 +1,107 @@
|
||||
#!/usr/bin/env bash
# SPDX-License-Identifier: AGPL-3.0-or-later
# BENCH-AUTO-401-019: Run baseline benchmark automation

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }

usage() {
  echo "Usage: $0 [--populate] [--compute] [--compare BASELINE] [--all]"
  echo ""
  echo "Run benchmark automation pipeline."
  echo ""
  echo "Options:"
  echo "  --populate           Populate src/__Tests/__Benchmarks/findings from reachbench fixtures"
  echo "  --compute            Compute metrics from findings"
  echo "  --compare BASELINE   Compare with baseline scanner results"
  echo "  --all                Run all steps (populate + compute)"
  echo "  --dry-run            Don't write files (populate only)"
  echo "  --limit N            Limit cases processed (populate only)"
  echo "  --help, -h           Show this help"
  exit 1
}

DO_POPULATE=false
DO_COMPUTE=false
BASELINE_PATH=""
DRY_RUN=""
LIMIT=""

while [[ $# -gt 0 ]]; do
  case $1 in
    --populate)
      DO_POPULATE=true
      shift
      ;;
    --compute)
      DO_COMPUTE=true
      shift
      ;;
    --compare)
      BASELINE_PATH="$2"
      shift 2
      ;;
    --all)
      DO_POPULATE=true
      DO_COMPUTE=true
      shift
      ;;
    --dry-run)
      DRY_RUN="--dry-run"
      shift
      ;;
    --limit)
      LIMIT="--limit $2"
      shift 2
      ;;
    --help|-h)
      usage
      ;;
    *)
      log_error "Unknown option: $1"
      usage
      ;;
  esac
done

if [[ "$DO_POPULATE" == false && "$DO_COMPUTE" == false && -z "$BASELINE_PATH" ]]; then
  log_error "No action specified"
  usage
fi

cd "$REPO_ROOT"

# Step 1: Populate findings
if [[ "$DO_POPULATE" == true ]]; then
  log_info "Step 1: Populating findings from reachbench fixtures..."
  python3 devops/tools/bench/populate-findings.py $DRY_RUN $LIMIT
  echo ""
fi

# Step 2: Compute metrics
if [[ "$DO_COMPUTE" == true ]]; then
  log_info "Step 2: Computing metrics..."
  python3 devops/tools/bench/compute-metrics.py --json
  echo ""
fi

# Step 3: Compare with baseline
if [[ -n "$BASELINE_PATH" ]]; then
  log_info "Step 3: Comparing with baseline..."
  python3 src/__Tests/__Benchmarks/tools/compare.py --baseline "$BASELINE_PATH" --json
  echo ""
fi

log_info "Benchmark automation complete!"
log_info "Results available in src/__Tests/__Benchmarks/results/"
143
devops/tools/callgraph/go/framework.go
Normal file
143
devops/tools/callgraph/go/framework.go
Normal file
@@ -0,0 +1,143 @@
|
||||
// Framework detection for Go projects
package main

import (
	"golang.org/x/tools/go/ssa"
	"strings"
)

// FrameworkPattern defines detection patterns for a framework
type FrameworkPattern struct {
	Name          string
	Packages      []string
	EntrypointFns []string
	HandlerType   string
}

// Known Go web frameworks
var frameworkPatterns = []FrameworkPattern{
	{
		Name:          "net/http",
		Packages:      []string{"net/http"},
		EntrypointFns: []string{"HandleFunc", "Handle", "ListenAndServe"},
		HandlerType:   "http_handler",
	},
	{
		Name:          "gin",
		Packages:      []string{"github.com/gin-gonic/gin"},
		EntrypointFns: []string{"GET", "POST", "PUT", "DELETE", "PATCH", "Run"},
		HandlerType:   "http_handler",
	},
	{
		Name:          "echo",
		Packages:      []string{"github.com/labstack/echo"},
		EntrypointFns: []string{"GET", "POST", "PUT", "DELETE", "PATCH", "Start"},
		HandlerType:   "http_handler",
	},
	{
		Name:          "fiber",
		Packages:      []string{"github.com/gofiber/fiber"},
		EntrypointFns: []string{"Get", "Post", "Put", "Delete", "Patch", "Listen"},
		HandlerType:   "http_handler",
	},
	{
		Name:          "chi",
		Packages:      []string{"github.com/go-chi/chi"},
		EntrypointFns: []string{"Get", "Post", "Put", "Delete", "Patch", "Route"},
		HandlerType:   "http_handler",
	},
	{
		Name:          "mux",
		Packages:      []string{"github.com/gorilla/mux"},
		EntrypointFns: []string{"HandleFunc", "Handle", "NewRouter"},
		HandlerType:   "http_handler",
	},
	{
		Name:          "grpc",
		Packages:      []string{"google.golang.org/grpc"},
		EntrypointFns: []string{"RegisterServer", "NewServer"},
		HandlerType:   "grpc_method",
	},
	{
		Name:          "cobra",
		Packages:      []string{"github.com/spf13/cobra"},
		EntrypointFns: []string{"Execute", "AddCommand", "Run"},
		HandlerType:   "cli_command",
	},
}

// DetectFramework checks if a function is related to a known framework
func DetectFramework(fn *ssa.Function) *FrameworkPattern {
	if fn.Pkg == nil {
		return nil
	}

	pkgPath := fn.Pkg.Pkg.Path()

	for _, pattern := range frameworkPatterns {
		for _, pkg := range pattern.Packages {
			if strings.Contains(pkgPath, pkg) {
				return &pattern
			}
		}
	}

	return nil
}

// DetectFrameworkEntrypoint checks if a call is a framework route registration
func DetectFrameworkEntrypoint(call *ssa.Call) *Entrypoint {
	callee := call.Call.StaticCallee()
	if callee == nil || callee.Pkg == nil {
		return nil
	}

	pkgPath := callee.Pkg.Pkg.Path()
	fnName := callee.Name()

	for _, pattern := range frameworkPatterns {
		for _, pkg := range pattern.Packages {
			if strings.Contains(pkgPath, pkg) {
				for _, epFn := range pattern.EntrypointFns {
					if fnName == epFn {
						nodeID := makeSymbolID(callee)
						return &Entrypoint{
							ID:   nodeID,
							Type: pattern.HandlerType,
						}
					}
				}
			}
		}
	}

	return nil
}

// IsHTTPHandler checks if a function signature matches http.Handler
func IsHTTPHandler(fn *ssa.Function) bool {
	sig := fn.Signature

	// Check for (http.ResponseWriter, *http.Request) signature
	if sig.Params().Len() == 2 {
		p0 := sig.Params().At(0).Type().String()
		p1 := sig.Params().At(1).Type().String()

		if strings.Contains(p0, "ResponseWriter") && strings.Contains(p1, "Request") {
			return true
		}
	}

	// Check for gin.Context, echo.Context, fiber.Ctx, etc.
	if sig.Params().Len() >= 1 {
		p0 := sig.Params().At(0).Type().String()
		if strings.Contains(p0, "gin.Context") ||
			strings.Contains(p0, "echo.Context") ||
			strings.Contains(p0, "fiber.Ctx") ||
			strings.Contains(p0, "chi.") {
			return true
		}
	}

	return false
}
12
devops/tools/callgraph/go/go.mod
Normal file
12
devops/tools/callgraph/go/go.mod
Normal file
@@ -0,0 +1,12 @@
|
||||
module github.com/stella-ops/stella-callgraph-go

go 1.21

require (
	golang.org/x/tools v0.16.0
)

require (
	golang.org/x/mod v0.14.0 // indirect
	golang.org/x/sys v0.15.0 // indirect
)
395
devops/tools/callgraph/go/main.go
Normal file
395
devops/tools/callgraph/go/main.go
Normal file
@@ -0,0 +1,395 @@
|
||||
// stella-callgraph-go
|
||||
// Call graph extraction tool for Go projects using SSA analysis.
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/callgraph"
|
||||
"golang.org/x/tools/go/callgraph/cha"
|
||||
"golang.org/x/tools/go/callgraph/rta"
|
||||
"golang.org/x/tools/go/packages"
|
||||
"golang.org/x/tools/go/ssa"
|
||||
"golang.org/x/tools/go/ssa/ssautil"
|
||||
)
|
||||
|
||||
// CallGraphResult is the output structure
|
||||
type CallGraphResult struct {
|
||||
Module string `json:"module"`
|
||||
Nodes []Node `json:"nodes"`
|
||||
Edges []Edge `json:"edges"`
|
||||
Entrypoints []Entrypoint `json:"entrypoints"`
|
||||
}
|
||||
|
||||
// Node represents a function in the call graph
|
||||
type Node struct {
|
||||
ID string `json:"id"`
|
||||
Package string `json:"package"`
|
||||
Name string `json:"name"`
|
||||
Signature string `json:"signature"`
|
||||
Position Position `json:"position"`
|
||||
Visibility string `json:"visibility"`
|
||||
Annotations []string `json:"annotations"`
|
||||
}
|
||||
|
||||
// Edge represents a call between functions
|
||||
type Edge struct {
|
||||
From string `json:"from"`
|
||||
To string `json:"to"`
|
||||
Kind string `json:"kind"`
|
||||
Site Position `json:"site"`
|
||||
}
|
||||
|
||||
// Position in source code
|
||||
type Position struct {
|
||||
File string `json:"file"`
|
||||
Line int `json:"line"`
|
||||
Column int `json:"column"`
|
||||
}
|
||||
|
||||
// Entrypoint represents an entry point function
|
||||
type Entrypoint struct {
|
||||
ID string `json:"id"`
|
||||
Type string `json:"type"`
|
||||
Route string `json:"route,omitempty"`
|
||||
Method string `json:"method,omitempty"`
|
||||
}
|
||||
|
||||
func main() {
|
||||
var (
|
||||
projectPath string
|
||||
algorithm string
|
||||
jsonFormat bool
|
||||
)
|
||||
|
||||
flag.StringVar(&projectPath, "path", ".", "Path to Go project")
|
||||
flag.StringVar(&algorithm, "algo", "cha", "Call graph algorithm: cha, rta, or static")
|
||||
flag.BoolVar(&jsonFormat, "json", false, "Output formatted JSON")
|
||||
flag.Parse()
|
||||
|
||||
if len(flag.Args()) > 0 {
|
||||
projectPath = flag.Args()[0]
|
||||
}
|
||||
|
||||
result, err := analyzeProject(projectPath, algorithm)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
var output []byte
|
||||
if jsonFormat {
|
||||
output, err = json.MarshalIndent(result, "", " ")
|
||||
} else {
|
||||
output, err = json.Marshal(result)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error encoding JSON: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
fmt.Println(string(output))
|
||||
}
|
||||
|
||||
func analyzeProject(projectPath string, algorithm string) (*CallGraphResult, error) {
|
||||
absPath, err := filepath.Abs(projectPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid path: %w", err)
|
||||
}
|
||||
|
||||
// Load packages
|
||||
cfg := &packages.Config{
|
||||
Mode: packages.LoadAllSyntax,
|
||||
Dir: absPath,
|
||||
}
|
||||
|
||||
pkgs, err := packages.Load(cfg, "./...")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load packages: %w", err)
|
||||
}
|
||||
|
||||
if len(pkgs) == 0 {
|
||||
return nil, fmt.Errorf("no packages found")
|
||||
}
|
||||
|
||||
// Check for errors
|
||||
for _, pkg := range pkgs {
|
||||
if len(pkg.Errors) > 0 {
|
||||
// Log but continue
|
||||
for _, e := range pkg.Errors {
|
||||
fmt.Fprintf(os.Stderr, "Warning: %v\n", e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Build SSA
|
||||
prog, _ := ssautil.AllPackages(pkgs, ssa.SanityCheckFunctions)
|
||||
prog.Build()
|
||||
|
||||
// Extract module name
|
||||
moduleName := extractModuleName(absPath, pkgs)
|
||||
|
||||
// Build call graph using the specified algorithm
|
||||
var cg *callgraph.Graph
|
||||
switch algorithm {
|
||||
case "rta":
|
||||
// RTA (Rapid Type Analysis) - more precise for programs with main
|
||||
mains := ssautil.MainPackages(prog.AllPackages())
|
||||
if len(mains) > 0 {
|
||||
var roots []*ssa.Function
|
||||
for _, main := range mains {
|
||||
if mainFn := main.Func("main"); mainFn != nil {
|
||||
roots = append(roots, mainFn)
|
||||
}
|
||||
if initFn := main.Func("init"); initFn != nil {
|
||||
roots = append(roots, initFn)
|
||||
}
|
||||
}
|
||||
if len(roots) > 0 {
|
||||
rtaResult := rta.Analyze(roots, true)
|
||||
cg = rtaResult.CallGraph
|
||||
}
|
||||
}
|
||||
if cg == nil {
|
||||
// Fall back to CHA if no main packages
|
||||
cg = cha.CallGraph(prog)
|
||||
}
|
||||
case "cha":
|
||||
// CHA (Class Hierarchy Analysis) - sound but less precise
|
||||
cg = cha.CallGraph(prog)
|
||||
default:
|
||||
// Default to CHA
|
||||
cg = cha.CallGraph(prog)
|
||||
}
|
||||
|
||||
// Collect nodes and edges from call graph
|
||||
nodes := make([]Node, 0)
|
||||
edges := make([]Edge, 0)
|
||||
entrypoints := make([]Entrypoint, 0)
|
||||
seenNodes := make(map[string]bool)
|
||||
seenEdges := make(map[string]bool)
|
||||
|
||||
// If we have a call graph, use it for edges
|
||||
if cg != nil {
|
||||
callgraph.GraphVisitEdges(cg, func(edge *callgraph.Edge) error {
|
||||
if edge.Caller.Func == nil || edge.Callee.Func == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
callerID := makeSymbolID(edge.Caller.Func)
|
||||
calleeID := makeSymbolID(edge.Callee.Func)
|
||||
|
||||
// Add caller node if not seen
|
||||
if !seenNodes[callerID] {
|
||||
seenNodes[callerID] = true
|
||||
nodes = append(nodes, makeNodeFromFunction(prog, edge.Caller.Func))
|
||||
}
|
||||
|
||||
// Add callee node if not seen
|
||||
if !seenNodes[calleeID] {
|
||||
seenNodes[calleeID] = true
|
||||
nodes = append(nodes, makeNodeFromFunction(prog, edge.Callee.Func))
|
||||
}
|
||||
|
||||
// Add edge
|
||||
edgeKey := fmt.Sprintf("%s|%s", callerID, calleeID)
|
||||
if !seenEdges[edgeKey] {
|
||||
seenEdges[edgeKey] = true
|
||||
|
||||
kind := "direct"
|
||||
if edge.Site != nil {
|
||||
if _, ok := edge.Site.(*ssa.Go); ok {
|
||||
kind = "goroutine"
|
||||
} else if _, ok := edge.Site.(*ssa.Defer); ok {
|
||||
kind = "defer"
|
||||
}
|
||||
}
|
||||
|
||||
var site Position
|
||||
if edge.Site != nil {
|
||||
pos := prog.Fset.Position(edge.Site.Pos())
|
||||
site = Position{
|
||||
File: pos.Filename,
|
||||
Line: pos.Line,
|
||||
}
|
||||
}
|
||||
|
||||
edges = append(edges, Edge{
|
||||
From: callerID,
|
||||
To: calleeID,
|
||||
Kind: kind,
|
||||
Site: site,
|
||||
})
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// Also scan all functions to find any missing nodes and entrypoints
|
||||
for _, pkg := range prog.AllPackages() {
|
||||
if pkg == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, member := range pkg.Members {
|
||||
fn, ok := member.(*ssa.Function)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
nodeID := makeSymbolID(fn)
|
||||
if !seenNodes[nodeID] {
|
||||
seenNodes[nodeID] = true
|
||||
nodes = append(nodes, makeNodeFromFunction(prog, fn))
|
||||
}
|
||||
|
||||
// Check for entrypoints
|
||||
if ep := detectEntrypoint(fn); ep != nil {
|
||||
entrypoints = append(entrypoints, *ep)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &CallGraphResult{
|
||||
Module: moduleName,
|
||||
Nodes: nodes,
|
||||
Edges: edges,
|
||||
Entrypoints: entrypoints,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func makeNodeFromFunction(prog *ssa.Program, fn *ssa.Function) Node {
|
||||
pos := prog.Fset.Position(fn.Pos())
|
||||
pkgPath := ""
|
||||
if fn.Pkg != nil {
|
||||
pkgPath = fn.Pkg.Pkg.Path()
|
||||
}
|
||||
|
||||
return Node{
|
||||
ID: makeSymbolID(fn),
|
||||
Package: pkgPath,
|
||||
Name: fn.Name(),
|
||||
Signature: fn.Signature.String(),
|
||||
Position: Position{
|
||||
File: pos.Filename,
|
||||
Line: pos.Line,
|
||||
Column: pos.Column,
|
||||
},
|
||||
Visibility: getVisibility(fn.Name()),
|
||||
Annotations: detectAnnotations(fn),
|
||||
}
|
||||
}
|
||||
|
||||
func extractModuleName(projectPath string, pkgs []*packages.Package) string {
|
||||
// Try to get from go.mod
|
||||
goModPath := filepath.Join(projectPath, "go.mod")
|
||||
if data, err := os.ReadFile(goModPath); err == nil {
|
||||
lines := strings.Split(string(data), "\n")
|
||||
for _, line := range lines {
|
||||
if strings.HasPrefix(line, "module ") {
|
||||
return strings.TrimSpace(strings.TrimPrefix(line, "module "))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to first package path
|
||||
if len(pkgs) > 0 {
|
||||
return pkgs[0].PkgPath
|
||||
}
|
||||
|
||||
return filepath.Base(projectPath)
|
||||
}
|
||||
|
||||
func makeSymbolID(fn *ssa.Function) string {
|
||||
if fn.Pkg == nil {
|
||||
return fmt.Sprintf("go:external/%s", fn.Name())
|
||||
}
|
||||
|
||||
pkg := fn.Pkg.Pkg.Path()
|
||||
if fn.Signature.Recv() != nil {
|
||||
// Method
|
||||
recv := fn.Signature.Recv().Type().String()
|
||||
recv = strings.TrimPrefix(recv, "*")
|
||||
if idx := strings.LastIndex(recv, "."); idx >= 0 {
|
||||
recv = recv[idx+1:]
|
||||
}
|
||||
return fmt.Sprintf("go:%s.%s.%s", pkg, recv, fn.Name())
|
||||
}
|
||||
return fmt.Sprintf("go:%s.%s", pkg, fn.Name())
|
||||
}
|
||||
|
||||
func getVisibility(name string) string {
|
||||
if len(name) == 0 {
|
||||
return "private"
|
||||
}
|
||||
if name[0] >= 'A' && name[0] <= 'Z' {
|
||||
return "public"
|
||||
}
|
||||
return "private"
|
||||
}
|
||||
|
||||
func detectAnnotations(fn *ssa.Function) []string {
|
||||
// Go doesn't have annotations, but we can detect patterns
|
||||
annotations := make([]string, 0)
|
||||
|
||||
// Detect handler patterns from naming
|
||||
if strings.HasSuffix(fn.Name(), "Handler") {
|
||||
annotations = append(annotations, "handler")
|
||||
}
|
||||
if strings.HasSuffix(fn.Name(), "Middleware") {
|
||||
annotations = append(annotations, "middleware")
|
||||
}
|
||||
|
||||
return annotations
|
||||
}
|
||||
|
||||
func detectEntrypoint(fn *ssa.Function) *Entrypoint {
|
||||
name := fn.Name()
|
||||
pkg := ""
|
||||
if fn.Pkg != nil {
|
||||
pkg = fn.Pkg.Pkg.Path()
|
||||
}
|
||||
|
||||
nodeID := makeSymbolID(fn)
|
||||
|
||||
// main.main
|
||||
if name == "main" && strings.HasSuffix(pkg, "main") {
|
||||
return &Entrypoint{
|
||||
ID: nodeID,
|
||||
Type: "cli_command",
|
||||
}
|
||||
}
|
||||
|
||||
// init functions
|
||||
if name == "init" {
|
||||
return &Entrypoint{
|
||||
ID: nodeID,
|
||||
Type: "background_job",
|
||||
}
|
||||
}
|
||||
|
||||
// HTTP handler patterns
|
||||
if strings.HasSuffix(name, "Handler") || strings.Contains(name, "Handle") {
|
||||
return &Entrypoint{
|
||||
ID: nodeID,
|
||||
Type: "http_handler",
|
||||
}
|
||||
}
|
||||
|
||||
// gRPC patterns
|
||||
if strings.HasSuffix(name, "Server") && strings.HasPrefix(name, "Register") {
|
||||
return &Entrypoint{
|
||||
ID: nodeID,
|
||||
Type: "grpc_method",
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
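
For orientation, a small sketch (not part of this commit) of consuming the JSON emitted by stella-callgraph-go above: build an adjacency map from the edges and walk forward from the reported entrypoints. The field names follow the json tags on CallGraphResult/Node/Edge/Entrypoint; the input file name is an assumption.

    import json
    from collections import defaultdict, deque

    # e.g. produced by: stella-callgraph-go ./my-go-project -json > callgraph.json
    graph = json.load(open("callgraph.json", encoding="utf-8"))

    adjacency = defaultdict(list)
    for edge in graph["edges"]:
        adjacency[edge["from"]].append(edge["to"])

    reachable = set()
    queue = deque(ep["id"] for ep in graph["entrypoints"])
    while queue:
        node = queue.popleft()
        if node in reachable:
            continue
        reachable.add(node)
        queue.extend(adjacency[node])

    print(f"module={graph['module']} nodes={len(graph['nodes'])} reachable_from_entrypoints={len(reachable)}")
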
178
devops/tools/callgraph/node/framework-detect.js
Normal file
178
devops/tools/callgraph/node/framework-detect.js
Normal file
@@ -0,0 +1,178 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// framework-detect.js
|
||||
// Framework detection patterns for JavaScript/TypeScript projects.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Framework detection patterns
|
||||
*/
|
||||
export const frameworkPatterns = {
|
||||
express: {
|
||||
packageNames: ['express'],
|
||||
patterns: [
|
||||
/const\s+\w+\s*=\s*require\(['"]express['"]\)/,
|
||||
/import\s+\w+\s+from\s+['"]express['"]/,
|
||||
/app\.(get|post|put|delete|patch)\s*\(/
|
||||
],
|
||||
entrypointType: 'http_handler'
|
||||
},
|
||||
|
||||
fastify: {
|
||||
packageNames: ['fastify'],
|
||||
patterns: [
|
||||
/require\(['"]fastify['"]\)/,
|
||||
/import\s+\w+\s+from\s+['"]fastify['"]/,
|
||||
/fastify\.(get|post|put|delete|patch)\s*\(/
|
||||
],
|
||||
entrypointType: 'http_handler'
|
||||
},
|
||||
|
||||
koa: {
|
||||
packageNames: ['koa', '@koa/router'],
|
||||
patterns: [
|
||||
/require\(['"]koa['"]\)/,
|
||||
/import\s+\w+\s+from\s+['"]koa['"]/,
|
||||
/router\.(get|post|put|delete|patch)\s*\(/
|
||||
],
|
||||
entrypointType: 'http_handler'
|
||||
},
|
||||
|
||||
hapi: {
|
||||
packageNames: ['@hapi/hapi'],
|
||||
patterns: [
|
||||
/require\(['"]@hapi\/hapi['"]\)/,
|
||||
/import\s+\w+\s+from\s+['"]@hapi\/hapi['"]/,
|
||||
/server\.route\s*\(/
|
||||
],
|
||||
entrypointType: 'http_handler'
|
||||
},
|
||||
|
||||
nestjs: {
|
||||
packageNames: ['@nestjs/core', '@nestjs/common'],
|
||||
patterns: [
|
||||
/@Controller\s*\(/,
|
||||
/@Get\s*\(/,
|
||||
/@Post\s*\(/,
|
||||
/@Put\s*\(/,
|
||||
/@Delete\s*\(/,
|
||||
/@Patch\s*\(/
|
||||
],
|
||||
entrypointType: 'http_handler'
|
||||
},
|
||||
|
||||
socketio: {
|
||||
packageNames: ['socket.io'],
|
||||
patterns: [
|
||||
/require\(['"]socket\.io['"]\)/,
|
||||
/import\s+\w+\s+from\s+['"]socket\.io['"]/,
|
||||
/io\.on\s*\(\s*['"]connection['"]/,
|
||||
/socket\.on\s*\(/
|
||||
],
|
||||
entrypointType: 'websocket_handler'
|
||||
},
|
||||
|
||||
awsLambda: {
|
||||
packageNames: ['aws-lambda', '@types/aws-lambda'],
|
||||
patterns: [
|
||||
/exports\.handler\s*=/,
|
||||
/export\s+(const|async function)\s+handler/,
|
||||
/module\.exports\.handler/,
|
||||
/APIGatewayProxyHandler/,
|
||||
/APIGatewayEvent/
|
||||
],
|
||||
entrypointType: 'lambda'
|
||||
},
|
||||
|
||||
azureFunctions: {
|
||||
packageNames: ['@azure/functions'],
|
||||
patterns: [
|
||||
/require\(['"]@azure\/functions['"]\)/,
|
||||
/import\s+\w+\s+from\s+['"]@azure\/functions['"]/,
|
||||
/app\.(http|timer|queue|blob)\s*\(/
|
||||
],
|
||||
entrypointType: 'cloud_function'
|
||||
},
|
||||
|
||||
gcpFunctions: {
|
||||
packageNames: ['@google-cloud/functions-framework'],
|
||||
patterns: [
|
||||
/require\(['"]@google-cloud\/functions-framework['"]\)/,
|
||||
/functions\.(http|cloudEvent)\s*\(/
|
||||
],
|
||||
entrypointType: 'cloud_function'
|
||||
},
|
||||
|
||||
electron: {
|
||||
packageNames: ['electron'],
|
||||
patterns: [
|
||||
/require\(['"]electron['"]\)/,
|
||||
/import\s+\{[^}]*\}\s+from\s+['"]electron['"]/,
|
||||
/ipcMain\.on\s*\(/,
|
||||
/ipcRenderer\.on\s*\(/
|
||||
],
|
||||
entrypointType: 'event_handler'
|
||||
},
|
||||
|
||||
grpc: {
|
||||
packageNames: ['@grpc/grpc-js', 'grpc'],
|
||||
patterns: [
|
||||
/require\(['"]@grpc\/grpc-js['"]\)/,
|
||||
/addService\s*\(/,
|
||||
/loadPackageDefinition\s*\(/
|
||||
],
|
||||
entrypointType: 'grpc_method'
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Detect frameworks from package.json dependencies
|
||||
* @param {object} packageJson
|
||||
* @returns {string[]}
|
||||
*/
|
||||
export function detectFrameworks(packageJson) {
|
||||
const detected = [];
|
||||
const allDeps = {
|
||||
...packageJson.dependencies,
|
||||
...packageJson.devDependencies
|
||||
};
|
||||
|
||||
for (const [framework, config] of Object.entries(frameworkPatterns)) {
|
||||
for (const pkgName of config.packageNames) {
|
||||
if (allDeps[pkgName]) {
|
||||
detected.push(framework);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return detected;
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect frameworks from source code patterns
|
||||
* @param {string} content
|
||||
* @returns {string[]}
|
||||
*/
|
||||
export function detectFrameworksFromCode(content) {
|
||||
const detected = [];
|
||||
|
||||
for (const [framework, config] of Object.entries(frameworkPatterns)) {
|
||||
for (const pattern of config.patterns) {
|
||||
if (pattern.test(content)) {
|
||||
detected.push(framework);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return detected;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get entrypoint type for a detected framework
|
||||
* @param {string} framework
|
||||
* @returns {string}
|
||||
*/
|
||||
export function getEntrypointType(framework) {
|
||||
return frameworkPatterns[framework]?.entrypointType || 'unknown';
|
||||
}
|
||||
478
devops/tools/callgraph/node/index.js
Normal file
478
devops/tools/callgraph/node/index.js
Normal file
@@ -0,0 +1,478 @@
|
||||
#!/usr/bin/env node
|
||||
// -----------------------------------------------------------------------------
|
||||
// stella-callgraph-node
|
||||
// Call graph extraction tool for JavaScript/TypeScript projects.
|
||||
// Uses Babel AST for static analysis.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
import { readFileSync, readdirSync, statSync, existsSync } from 'fs';
|
||||
import { join, extname, relative, dirname } from 'path';
|
||||
import { parse } from '@babel/parser';
|
||||
import traverse from '@babel/traverse';
|
||||
import { buildSinkLookup, matchSink } from './sink-detect.js';
|
||||
|
||||
// Pre-build sink lookup for fast detection
|
||||
const sinkLookup = buildSinkLookup();
|
||||
|
||||
/**
|
||||
* Main entry point
|
||||
*/
|
||||
async function main() {
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0 || args.includes('--help')) {
|
||||
printUsage();
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const targetPath = args[0];
|
||||
const outputFormat = args.includes('--json') ? 'json' : 'ndjson';
|
||||
|
||||
try {
|
||||
const result = await analyzeProject(targetPath);
|
||||
|
||||
if (outputFormat === 'json') {
|
||||
console.log(JSON.stringify(result, null, 2));
|
||||
} else {
|
||||
console.log(JSON.stringify(result));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error: ${error.message}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
function printUsage() {
|
||||
console.log(`
|
||||
stella-callgraph-node - JavaScript/TypeScript call graph extractor
|
||||
|
||||
Usage:
|
||||
stella-callgraph-node <project-path> [options]
|
||||
|
||||
Options:
|
||||
--json Output formatted JSON instead of NDJSON
|
||||
--help Show this help message
|
||||
|
||||
Example:
|
||||
stella-callgraph-node ./my-express-app --json
|
||||
`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze a JavaScript/TypeScript project
|
||||
* @param {string} projectPath
|
||||
* @returns {Promise<CallGraphResult>}
|
||||
*/
|
||||
async function analyzeProject(projectPath) {
|
||||
const packageJsonPath = join(projectPath, 'package.json');
|
||||
let packageInfo = { name: 'unknown', version: '0.0.0' };
|
||||
|
||||
if (existsSync(packageJsonPath)) {
|
||||
const content = readFileSync(packageJsonPath, 'utf-8');
|
||||
packageInfo = JSON.parse(content);
|
||||
}
|
||||
|
||||
const sourceFiles = findSourceFiles(projectPath);
|
||||
const nodes = [];
|
||||
const edges = [];
|
||||
const entrypoints = [];
|
||||
const sinks = [];
|
||||
|
||||
for (const file of sourceFiles) {
|
||||
try {
|
||||
const content = readFileSync(file, 'utf-8');
|
||||
const relativePath = relative(projectPath, file);
|
||||
const result = analyzeFile(content, relativePath, packageInfo.name);
|
||||
|
||||
nodes.push(...result.nodes);
|
||||
edges.push(...result.edges);
|
||||
entrypoints.push(...result.entrypoints);
|
||||
sinks.push(...result.sinks);
|
||||
} catch (error) {
|
||||
// Skip files that can't be parsed
|
||||
console.error(`Warning: Could not parse ${file}: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
module: packageInfo.name,
|
||||
version: packageInfo.version,
|
||||
nodes: deduplicateNodes(nodes),
|
||||
edges: deduplicateEdges(edges),
|
||||
entrypoints,
|
||||
sinks: deduplicateSinks(sinks)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all JavaScript/TypeScript source files
|
||||
* @param {string} dir
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function findSourceFiles(dir) {
|
||||
const files = [];
|
||||
const excludeDirs = ['node_modules', 'dist', 'build', '.git', 'coverage', '__tests__'];
|
||||
const extensions = ['.js', '.jsx', '.ts', '.tsx', '.mjs', '.cjs'];
|
||||
|
||||
function walk(currentDir) {
|
||||
const entries = readdirSync(currentDir);
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = join(currentDir, entry);
|
||||
const stat = statSync(fullPath);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
if (!excludeDirs.includes(entry) && !entry.startsWith('.')) {
|
||||
walk(fullPath);
|
||||
}
|
||||
} else if (stat.isFile()) {
|
||||
const ext = extname(entry);
|
||||
if (extensions.includes(ext) && !entry.includes('.test.') && !entry.includes('.spec.')) {
|
||||
files.push(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
walk(dir);
|
||||
return files.sort();
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze a single source file
|
||||
* @param {string} content
|
||||
* @param {string} relativePath
|
||||
* @param {string} packageName
|
||||
* @returns {{ nodes: any[], edges: any[], entrypoints: any[] }}
|
||||
*/
|
||||
function analyzeFile(content, relativePath, packageName) {
|
||||
const nodes = [];
|
||||
const edges = [];
|
||||
const entrypoints = [];
|
||||
const sinks = [];
|
||||
const moduleBase = relativePath.replace(/\.[^.]+$/, '').replace(/\\/g, '/');
|
||||
|
||||
// Parse with Babel
|
||||
const ast = parse(content, {
|
||||
sourceType: 'module',
|
||||
plugins: [
|
||||
'typescript',
|
||||
'jsx',
|
||||
'decorators-legacy',
|
||||
'classProperties',
|
||||
'classPrivateProperties',
|
||||
'classPrivateMethods',
|
||||
'dynamicImport',
|
||||
'optionalChaining',
|
||||
'nullishCoalescingOperator'
|
||||
],
|
||||
errorRecovery: true
|
||||
});
|
||||
|
||||
// Track current function context for edges
|
||||
let currentFunction = null;
|
||||
|
||||
traverse.default(ast, {
|
||||
// Function declarations
|
||||
FunctionDeclaration(path) {
|
||||
const name = path.node.id?.name;
|
||||
if (!name) return;
|
||||
|
||||
const nodeId = `js:${packageName}/${moduleBase}.${name}`;
|
||||
const isExported = path.parent.type === 'ExportNamedDeclaration' ||
|
||||
path.parent.type === 'ExportDefaultDeclaration';
|
||||
|
||||
nodes.push({
|
||||
id: nodeId,
|
||||
package: packageName,
|
||||
name,
|
||||
signature: getFunctionSignature(path.node),
|
||||
position: {
|
||||
file: relativePath,
|
||||
line: path.node.loc?.start.line || 0,
|
||||
column: path.node.loc?.start.column || 0
|
||||
},
|
||||
visibility: isExported ? 'public' : 'private',
|
||||
annotations: []
|
||||
});
|
||||
|
||||
// Check for route handlers
|
||||
const routeInfo = detectRouteHandler(path);
|
||||
if (routeInfo) {
|
||||
entrypoints.push({
|
||||
id: nodeId,
|
||||
type: routeInfo.type,
|
||||
route: routeInfo.route,
|
||||
method: routeInfo.method
|
||||
});
|
||||
}
|
||||
|
||||
currentFunction = nodeId;
|
||||
},
|
||||
|
||||
// Arrow functions assigned to variables
|
||||
VariableDeclarator(path) {
|
||||
if (path.node.init?.type === 'ArrowFunctionExpression' ||
|
||||
path.node.init?.type === 'FunctionExpression') {
|
||||
|
||||
const name = path.node.id?.name;
|
||||
if (!name) return;
|
||||
|
||||
const nodeId = `js:${packageName}/${moduleBase}.${name}`;
|
||||
const parent = path.parentPath?.parent;
|
||||
const isExported = parent?.type === 'ExportNamedDeclaration';
|
||||
|
||||
nodes.push({
|
||||
id: nodeId,
|
||||
package: packageName,
|
||||
name,
|
||||
signature: getFunctionSignature(path.node.init),
|
||||
position: {
|
||||
file: relativePath,
|
||||
line: path.node.loc?.start.line || 0,
|
||||
column: path.node.loc?.start.column || 0
|
||||
},
|
||||
visibility: isExported ? 'public' : 'private',
|
||||
annotations: []
|
||||
});
|
||||
|
||||
currentFunction = nodeId;
|
||||
}
|
||||
},
|
||||
|
||||
// Class methods
|
||||
ClassMethod(path) {
|
||||
const className = path.parentPath?.parent?.id?.name;
|
||||
const methodName = path.node.key?.name;
|
||||
if (!className || !methodName) return;
|
||||
|
||||
const nodeId = `js:${packageName}/${moduleBase}.${className}.${methodName}`;
|
||||
|
||||
nodes.push({
|
||||
id: nodeId,
|
||||
package: packageName,
|
||||
name: `${className}.${methodName}`,
|
||||
signature: getFunctionSignature(path.node),
|
||||
position: {
|
||||
file: relativePath,
|
||||
line: path.node.loc?.start.line || 0,
|
||||
column: path.node.loc?.start.column || 0
|
||||
},
|
||||
visibility: path.node.accessibility || 'public',
|
||||
annotations: getDecorators(path)
|
||||
});
|
||||
|
||||
// Check for controller/handler patterns
|
||||
if (className.endsWith('Controller') || className.endsWith('Handler')) {
|
||||
entrypoints.push({
|
||||
id: nodeId,
|
||||
type: 'http_handler',
|
||||
route: null,
|
||||
method: null
|
||||
});
|
||||
}
|
||||
|
||||
currentFunction = nodeId;
|
||||
},
|
||||
|
||||
// Call expressions (edges)
|
||||
CallExpression(path) {
|
||||
if (!currentFunction) return;
|
||||
|
||||
const callee = path.node.callee;
|
||||
let targetId = null;
|
||||
let objName = null;
|
||||
let methodName = null;
|
||||
|
||||
if (callee.type === 'Identifier') {
|
||||
targetId = `js:${packageName}/${moduleBase}.${callee.name}`;
|
||||
methodName = callee.name;
|
||||
} else if (callee.type === 'MemberExpression') {
|
||||
objName = callee.object?.name || 'unknown';
|
||||
methodName = callee.property?.name || 'unknown';
|
||||
targetId = `js:external/${objName}.${methodName}`;
|
||||
}
|
||||
|
||||
if (targetId) {
|
||||
edges.push({
|
||||
from: currentFunction,
|
||||
to: targetId,
|
||||
kind: 'direct',
|
||||
site: {
|
||||
file: relativePath,
|
||||
line: path.node.loc?.start.line || 0
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Detect security sinks
|
||||
if (methodName) {
|
||||
const sinkMatch = matchSink(objName || methodName, methodName, sinkLookup);
|
||||
if (sinkMatch) {
|
||||
sinks.push({
|
||||
caller: currentFunction,
|
||||
category: sinkMatch.category,
|
||||
method: `${objName ? objName + '.' : ''}${methodName}`,
|
||||
site: {
|
||||
file: relativePath,
|
||||
line: path.node.loc?.start.line || 0,
|
||||
column: path.node.loc?.start.column || 0
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Detect Express/Fastify route registration
|
||||
detectRouteRegistration(path, entrypoints, packageName, moduleBase, relativePath);
|
||||
}
|
||||
});
|
||||
|
||||
return { nodes, edges, entrypoints, sinks };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get function signature string
|
||||
* @param {object} node
|
||||
* @returns {string}
|
||||
*/
|
||||
function getFunctionSignature(node) {
|
||||
const params = node.params?.map(p => {
|
||||
if (p.type === 'Identifier') {
|
||||
return p.name;
|
||||
} else if (p.type === 'AssignmentPattern') {
|
||||
return p.left?.name || 'arg';
|
||||
} else if (p.type === 'RestElement') {
|
||||
return `...${p.argument?.name || 'args'}`;
|
||||
}
|
||||
return 'arg';
|
||||
}) || [];
|
||||
|
||||
const isAsync = node.async ? 'async ' : '';
|
||||
return `${isAsync}(${params.join(', ')})`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get decorators from a path
|
||||
* @param {object} path
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function getDecorators(path) {
|
||||
const decorators = path.node.decorators || [];
|
||||
return decorators.map(d => {
|
||||
if (d.expression?.callee?.name) {
|
||||
return `@${d.expression.callee.name}`;
|
||||
} else if (d.expression?.name) {
|
||||
return `@${d.expression.name}`;
|
||||
}
|
||||
return '@unknown';
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect if function is a route handler
|
||||
* @param {object} path
|
||||
* @returns {{ type: string, route: string | null, method: string | null } | null}
|
||||
*/
|
||||
function detectRouteHandler(path) {
|
||||
const name = path.node.id?.name?.toLowerCase();
|
||||
|
||||
if (!name) return null;
|
||||
|
||||
// Common handler naming patterns
|
||||
if (name.includes('handler') || name.includes('controller')) {
|
||||
return { type: 'http_handler', route: null, method: null };
|
||||
}
|
||||
|
||||
// Lambda handler pattern
|
||||
if (name === 'handler' || name === 'main') {
|
||||
return { type: 'lambda', route: null, method: null };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect Express/Fastify route registration
|
||||
* @param {object} path
|
||||
* @param {any[]} entrypoints
|
||||
* @param {string} packageName
|
||||
* @param {string} moduleBase
|
||||
* @param {string} relativePath
|
||||
*/
|
||||
function detectRouteRegistration(path, entrypoints, packageName, moduleBase, relativePath) {
|
||||
const callee = path.node.callee;
|
||||
|
||||
if (callee.type !== 'MemberExpression') return;
|
||||
|
||||
const methodName = callee.property?.name?.toLowerCase();
|
||||
const httpMethods = ['get', 'post', 'put', 'delete', 'patch', 'options', 'head'];
|
||||
|
||||
if (!httpMethods.includes(methodName)) return;
|
||||
|
||||
// Get route path from first argument
|
||||
const firstArg = path.node.arguments?.[0];
|
||||
let routePath = null;
|
||||
|
||||
if (firstArg?.type === 'StringLiteral') {
|
||||
routePath = firstArg.value;
|
||||
}
|
||||
|
||||
if (routePath) {
|
||||
const handlerName = `${methodName.toUpperCase()}_${routePath.replace(/[/:{}*?]/g, '_')}`;
|
||||
const nodeId = `js:${packageName}/${moduleBase}.${handlerName}`;
|
||||
|
||||
entrypoints.push({
|
||||
id: nodeId,
|
||||
type: 'http_handler',
|
||||
route: routePath,
|
||||
method: methodName.toUpperCase()
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove duplicate nodes
|
||||
* @param {any[]} nodes
|
||||
* @returns {any[]}
|
||||
*/
|
||||
function deduplicateNodes(nodes) {
|
||||
const seen = new Set();
|
||||
return nodes.filter(n => {
|
||||
if (seen.has(n.id)) return false;
|
||||
seen.add(n.id);
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove duplicate edges
|
||||
* @param {any[]} edges
|
||||
* @returns {any[]}
|
||||
*/
|
||||
function deduplicateEdges(edges) {
|
||||
const seen = new Set();
|
||||
return edges.filter(e => {
|
||||
const key = `${e.from}|${e.to}`;
|
||||
if (seen.has(key)) return false;
|
||||
seen.add(key);
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove duplicate sinks
|
||||
* @param {any[]} sinks
|
||||
* @returns {any[]}
|
||||
*/
|
||||
function deduplicateSinks(sinks) {
|
||||
const seen = new Set();
|
||||
return sinks.filter(s => {
|
||||
const key = `${s.caller}|${s.category}|${s.method}|${s.site.file}:${s.site.line}`;
|
||||
if (seen.has(key)) return false;
|
||||
seen.add(key);
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
// Run
|
||||
main().catch(console.error);
|
||||
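
Similarly, a short sketch (not part of this commit) of grouping the security sinks reported by stella-callgraph-node above by category; it assumes the tool's output has been saved to graph.json and uses the sink fields (caller, category, method, site) shown in analyzeFile.

    import json
    from collections import Counter

    # e.g. produced by: stella-callgraph-node ./my-express-app --json > graph.json
    graph = json.load(open("graph.json", encoding="utf-8"))

    sinks = graph.get("sinks", [])
    for category, count in Counter(s["category"] for s in sinks).most_common():
        print(f"{category}: {count}")

    for sink in sinks:
        print(f"  {sink['method']} called from {sink['caller']} at {sink['site']['file']}:{sink['site']['line']}")
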
675
devops/tools/callgraph/node/index.test.js
Normal file
675
devops/tools/callgraph/node/index.test.js
Normal file
@@ -0,0 +1,675 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// index.test.js
|
||||
// Sprint: SPRINT_3600_0004_0001 (Node.js Babel Integration)
|
||||
// Tasks: NODE-017, NODE-018 - Unit tests for AST parsing and entrypoint detection
|
||||
// Description: Tests for call graph extraction from JavaScript/TypeScript.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
import { test, describe, beforeEach } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { parse } from '@babel/parser';
|
||||
import traverse from '@babel/traverse';
|
||||
|
||||
// Test utilities for AST parsing
|
||||
function parseCode(code, options = {}) {
|
||||
return parse(code, {
|
||||
sourceType: 'module',
|
||||
plugins: [
|
||||
'typescript',
|
||||
'jsx',
|
||||
'decorators-legacy',
|
||||
'classProperties',
|
||||
'classPrivateProperties',
|
||||
'classPrivateMethods',
|
||||
'dynamicImport',
|
||||
'optionalChaining',
|
||||
'nullishCoalescingOperator'
|
||||
],
|
||||
errorRecovery: true,
|
||||
...options
|
||||
});
|
||||
}
|
||||
|
||||
describe('Babel Parser Integration', () => {
|
||||
test('parses simple JavaScript function', () => {
|
||||
const code = `
|
||||
function hello(name) {
|
||||
return 'Hello, ' + name;
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
assert.ok(ast);
|
||||
assert.equal(ast.type, 'File');
|
||||
assert.ok(ast.program.body.length > 0);
|
||||
});
|
||||
|
||||
test('parses arrow function', () => {
|
||||
const code = `
|
||||
const greet = (name) => {
|
||||
return \`Hello, \${name}\`;
|
||||
};
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
assert.ok(ast);
|
||||
|
||||
let foundArrow = false;
|
||||
traverse.default(ast, {
|
||||
ArrowFunctionExpression() {
|
||||
foundArrow = true;
|
||||
}
|
||||
});
|
||||
assert.ok(foundArrow, 'Should find arrow function');
|
||||
});
|
||||
|
||||
test('parses async function', () => {
|
||||
const code = `
|
||||
async function fetchData(url) {
|
||||
const response = await fetch(url);
|
||||
return response.json();
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let isAsync = false;
|
||||
traverse.default(ast, {
|
||||
FunctionDeclaration(path) {
|
||||
isAsync = path.node.async;
|
||||
}
|
||||
});
|
||||
assert.ok(isAsync, 'Should detect async function');
|
||||
});
|
||||
|
||||
test('parses class with methods', () => {
|
||||
const code = `
|
||||
class UserController {
|
||||
async getUser(id) {
|
||||
return this.userService.findById(id);
|
||||
}
|
||||
|
||||
async createUser(data) {
|
||||
return this.userService.create(data);
|
||||
}
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const methods = [];
|
||||
traverse.default(ast, {
|
||||
ClassMethod(path) {
|
||||
methods.push(path.node.key.name);
|
||||
}
|
||||
});
|
||||
assert.deepEqual(methods.sort(), ['createUser', 'getUser']);
|
||||
});
|
||||
|
||||
test('parses TypeScript with types', () => {
|
||||
const code = `
|
||||
interface User {
|
||||
id: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
function getUser(id: string): Promise<User> {
|
||||
return db.query<User>('SELECT * FROM users WHERE id = $1', [id]);
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
assert.ok(ast);
|
||||
|
||||
let foundFunction = false;
|
||||
traverse.default(ast, {
|
||||
FunctionDeclaration(path) {
|
||||
if (path.node.id.name === 'getUser') {
|
||||
foundFunction = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
assert.ok(foundFunction, 'Should parse TypeScript function');
|
||||
});
|
||||
|
||||
test('parses JSX components', () => {
|
||||
const code = `
|
||||
function Button({ onClick, children }) {
|
||||
return <button onClick={onClick}>{children}</button>;
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let foundJSX = false;
|
||||
traverse.default(ast, {
|
||||
JSXElement() {
|
||||
foundJSX = true;
|
||||
}
|
||||
});
|
||||
assert.ok(foundJSX, 'Should parse JSX');
|
||||
});
|
||||
|
||||
test('parses decorators', () => {
|
||||
const code = `
|
||||
@Controller('/users')
|
||||
class UserController {
|
||||
@Get('/:id')
|
||||
async getUser(@Param('id') id: string) {
|
||||
return this.userService.findById(id);
|
||||
}
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const decorators = [];
|
||||
traverse.default(ast, {
|
||||
ClassDeclaration(path) {
|
||||
if (path.node.decorators) {
|
||||
decorators.push(...path.node.decorators.map(d =>
|
||||
d.expression?.callee?.name || d.expression?.name
|
||||
));
|
||||
}
|
||||
},
|
||||
ClassMethod(path) {
|
||||
if (path.node.decorators) {
|
||||
decorators.push(...path.node.decorators.map(d =>
|
||||
d.expression?.callee?.name || d.expression?.name
|
||||
));
|
||||
}
|
||||
}
|
||||
});
|
||||
assert.ok(decorators.includes('Controller'));
|
||||
assert.ok(decorators.includes('Get'));
|
||||
});
|
||||
|
||||
test('parses dynamic imports', () => {
|
||||
const code = `
|
||||
async function loadModule(name) {
|
||||
const module = await import(\`./modules/\${name}\`);
|
||||
return module.default;
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let foundDynamicImport = false;
|
||||
traverse.default(ast, {
|
||||
Import() {
|
||||
foundDynamicImport = true;
|
||||
}
|
||||
});
|
||||
assert.ok(foundDynamicImport, 'Should detect dynamic import');
|
||||
});
|
||||
|
||||
test('parses optional chaining', () => {
|
||||
const code = `
|
||||
const name = user?.profile?.name ?? 'Anonymous';
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let foundOptionalChain = false;
|
||||
traverse.default(ast, {
|
||||
OptionalMemberExpression() {
|
||||
foundOptionalChain = true;
|
||||
}
|
||||
});
|
||||
assert.ok(foundOptionalChain, 'Should parse optional chaining');
|
||||
});
|
||||
|
||||
test('parses class private fields', () => {
|
||||
const code = `
|
||||
class Counter {
|
||||
#count = 0;
|
||||
|
||||
increment() {
|
||||
this.#count++;
|
||||
}
|
||||
|
||||
get value() {
|
||||
return this.#count;
|
||||
}
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let foundPrivateField = false;
|
||||
traverse.default(ast, {
|
||||
ClassPrivateProperty() {
|
||||
foundPrivateField = true;
|
||||
}
|
||||
});
|
||||
assert.ok(foundPrivateField, 'Should parse private class field');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Function Declaration Extraction', () => {
|
||||
test('extracts function name', () => {
|
||||
const code = `
|
||||
function processRequest(req, res) {
|
||||
res.json({ status: 'ok' });
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let functionName = null;
|
||||
traverse.default(ast, {
|
||||
FunctionDeclaration(path) {
|
||||
functionName = path.node.id.name;
|
||||
}
|
||||
});
|
||||
assert.equal(functionName, 'processRequest');
|
||||
});
|
||||
|
||||
test('extracts function parameters', () => {
|
||||
const code = `
|
||||
function greet(firstName, lastName, options = {}) {
|
||||
return \`Hello, \${firstName} \${lastName}\`;
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let params = [];
|
||||
traverse.default(ast, {
|
||||
FunctionDeclaration(path) {
|
||||
params = path.node.params.map(p => {
|
||||
if (p.type === 'Identifier') return p.name;
|
||||
if (p.type === 'AssignmentPattern') return p.left.name;
|
||||
return 'unknown';
|
||||
});
|
||||
}
|
||||
});
|
||||
assert.deepEqual(params, ['firstName', 'lastName', 'options']);
|
||||
});
|
||||
|
||||
test('detects exported functions', () => {
|
||||
const code = `
|
||||
export function publicFunction() {}
|
||||
function privateFunction() {}
|
||||
export default function defaultFunction() {}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const functions = { public: [], private: [] };
|
||||
traverse.default(ast, {
|
||||
FunctionDeclaration(path) {
|
||||
const name = path.node.id?.name;
|
||||
if (!name) return;
|
||||
|
||||
const isExported =
|
||||
path.parent.type === 'ExportNamedDeclaration' ||
|
||||
path.parent.type === 'ExportDefaultDeclaration';
|
||||
|
||||
if (isExported) {
|
||||
functions.public.push(name);
|
||||
} else {
|
||||
functions.private.push(name);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.deepEqual(functions.public.sort(), ['defaultFunction', 'publicFunction']);
|
||||
assert.deepEqual(functions.private, ['privateFunction']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Call Expression Extraction', () => {
|
||||
test('extracts direct function calls', () => {
|
||||
const code = `
|
||||
function main() {
|
||||
helper();
|
||||
processData();
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const calls = [];
|
||||
traverse.default(ast, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.type === 'Identifier') {
|
||||
calls.push(path.node.callee.name);
|
||||
}
|
||||
}
|
||||
});
|
||||
assert.deepEqual(calls.sort(), ['helper', 'processData']);
|
||||
});
|
||||
|
||||
test('extracts method calls', () => {
|
||||
const code = `
|
||||
function handler() {
|
||||
db.query('SELECT * FROM users');
|
||||
fs.readFile('./config.json');
|
||||
console.log('done');
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const methodCalls = [];
|
||||
traverse.default(ast, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.type === 'MemberExpression') {
|
||||
const obj = path.node.callee.object.name;
|
||||
const method = path.node.callee.property.name;
|
||||
methodCalls.push(`${obj}.${method}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
assert.ok(methodCalls.includes('db.query'));
|
||||
assert.ok(methodCalls.includes('fs.readFile'));
|
||||
assert.ok(methodCalls.includes('console.log'));
|
||||
});
|
||||
|
||||
test('extracts chained method calls', () => {
|
||||
const code = `
|
||||
const result = data
|
||||
.filter(x => x.active)
|
||||
.map(x => x.name)
|
||||
.join(', ');
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const methods = [];
|
||||
traverse.default(ast, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.type === 'MemberExpression') {
|
||||
const method = path.node.callee.property.name;
|
||||
methods.push(method);
|
||||
}
|
||||
}
|
||||
});
|
||||
assert.ok(methods.includes('filter'));
|
||||
assert.ok(methods.includes('map'));
|
||||
assert.ok(methods.includes('join'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('Framework Entrypoint Detection', () => {
|
||||
test('detects Express route handlers', () => {
|
||||
const code = `
|
||||
const express = require('express');
|
||||
const app = express();
|
||||
|
||||
app.get('/users', (req, res) => {
|
||||
res.json(users);
|
||||
});
|
||||
|
||||
app.post('/users', async (req, res) => {
|
||||
const user = await createUser(req.body);
|
||||
res.json(user);
|
||||
});
|
||||
|
||||
app.delete('/users/:id', (req, res) => {
|
||||
deleteUser(req.params.id);
|
||||
res.sendStatus(204);
|
||||
});
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const routes = [];
|
||||
traverse.default(ast, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.type === 'MemberExpression') {
|
||||
const method = path.node.callee.property.name?.toLowerCase();
|
||||
const httpMethods = ['get', 'post', 'put', 'delete', 'patch'];
|
||||
|
||||
if (httpMethods.includes(method)) {
|
||||
const routeArg = path.node.arguments[0];
|
||||
if (routeArg?.type === 'StringLiteral') {
|
||||
routes.push({ method: method.toUpperCase(), path: routeArg.value });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.equal(routes.length, 3);
|
||||
assert.ok(routes.some(r => r.method === 'GET' && r.path === '/users'));
|
||||
assert.ok(routes.some(r => r.method === 'POST' && r.path === '/users'));
|
||||
assert.ok(routes.some(r => r.method === 'DELETE' && r.path === '/users/:id'));
|
||||
});
|
||||
|
||||
test('detects Fastify route handlers', () => {
|
||||
const code = `
|
||||
const fastify = require('fastify')();
|
||||
|
||||
fastify.get('/health', async (request, reply) => {
|
||||
return { status: 'ok' };
|
||||
});
|
||||
|
||||
fastify.route({
|
||||
method: 'POST',
|
||||
url: '/items',
|
||||
handler: async (request, reply) => {
|
||||
return { id: 1 };
|
||||
}
|
||||
});
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const routes = [];
|
||||
traverse.default(ast, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.type === 'MemberExpression') {
|
||||
const method = path.node.callee.property.name?.toLowerCase();
|
||||
|
||||
if (['get', 'post', 'put', 'delete', 'patch', 'route'].includes(method)) {
|
||||
const routeArg = path.node.arguments[0];
|
||||
if (routeArg?.type === 'StringLiteral') {
|
||||
routes.push({ method: method.toUpperCase(), path: routeArg.value });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.ok(routes.some(r => r.path === '/health'));
|
||||
});
|
||||
|
||||
test('detects NestJS controller decorators', () => {
|
||||
const code = `
|
||||
@Controller('users')
|
||||
export class UsersController {
|
||||
@Get()
|
||||
findAll() {
|
||||
return this.usersService.findAll();
|
||||
}
|
||||
|
||||
@Get(':id')
|
||||
findOne(@Param('id') id: string) {
|
||||
return this.usersService.findOne(id);
|
||||
}
|
||||
|
||||
@Post()
|
||||
create(@Body() createUserDto: CreateUserDto) {
|
||||
return this.usersService.create(createUserDto);
|
||||
}
|
||||
}
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const handlers = [];
|
||||
traverse.default(ast, {
|
||||
ClassMethod(path) {
|
||||
const decorators = path.node.decorators || [];
|
||||
for (const decorator of decorators) {
|
||||
const name = decorator.expression?.callee?.name || decorator.expression?.name;
|
||||
if (['Get', 'Post', 'Put', 'Delete', 'Patch'].includes(name)) {
|
||||
handlers.push({
|
||||
method: name.toUpperCase(),
|
||||
handler: path.node.key.name
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.equal(handlers.length, 3);
|
||||
assert.ok(handlers.some(h => h.handler === 'findAll'));
|
||||
assert.ok(handlers.some(h => h.handler === 'findOne'));
|
||||
assert.ok(handlers.some(h => h.handler === 'create'));
|
||||
});
|
||||
|
||||
test('detects Koa router handlers', () => {
|
||||
const code = `
|
||||
const Router = require('koa-router');
|
||||
const router = new Router();
|
||||
|
||||
router.get('/items', async (ctx) => {
|
||||
ctx.body = await getItems();
|
||||
});
|
||||
|
||||
router.post('/items', async (ctx) => {
|
||||
ctx.body = await createItem(ctx.request.body);
|
||||
});
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const routes = [];
|
||||
traverse.default(ast, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.type === 'MemberExpression') {
|
||||
const objName = path.node.callee.object.name;
|
||||
const method = path.node.callee.property.name?.toLowerCase();
|
||||
|
||||
if (objName === 'router' && ['get', 'post', 'put', 'delete'].includes(method)) {
|
||||
const routeArg = path.node.arguments[0];
|
||||
if (routeArg?.type === 'StringLiteral') {
|
||||
routes.push({ method: method.toUpperCase(), path: routeArg.value });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.equal(routes.length, 2);
|
||||
assert.ok(routes.some(r => r.method === 'GET' && r.path === '/items'));
|
||||
assert.ok(routes.some(r => r.method === 'POST' && r.path === '/items'));
|
||||
});
|
||||
|
||||
test('detects AWS Lambda handlers', () => {
|
||||
const code = `
|
||||
export const handler = async (event, context) => {
|
||||
const body = JSON.parse(event.body);
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ message: 'Success' })
|
||||
};
|
||||
};
|
||||
|
||||
export const main = async (event) => {
|
||||
return { statusCode: 200 };
|
||||
};
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const handlers = [];
|
||||
traverse.default(ast, {
|
||||
VariableDeclarator(path) {
|
||||
const name = path.node.id?.name?.toLowerCase();
|
||||
if (['handler', 'main'].includes(name)) {
|
||||
if (path.node.init?.type === 'ArrowFunctionExpression') {
|
||||
handlers.push(path.node.id.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.ok(handlers.includes('handler'));
|
||||
assert.ok(handlers.includes('main'));
|
||||
});
|
||||
|
||||
test('detects Hapi route handlers', () => {
|
||||
const code = `
|
||||
const server = Hapi.server({ port: 3000 });
|
||||
|
||||
server.route({
|
||||
method: 'GET',
|
||||
path: '/users',
|
||||
handler: (request, h) => {
|
||||
return getUsers();
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: 'POST',
|
||||
path: '/users',
|
||||
handler: async (request, h) => {
|
||||
return createUser(request.payload);
|
||||
}
|
||||
});
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let routeCount = 0;
|
||||
traverse.default(ast, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.type === 'MemberExpression') {
|
||||
const method = path.node.callee.property.name;
|
||||
if (method === 'route') {
|
||||
routeCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.equal(routeCount, 2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Module Import/Export Detection', () => {
|
||||
test('detects CommonJS require', () => {
|
||||
const code = `
|
||||
const express = require('express');
|
||||
const { Router } = require('express');
|
||||
const db = require('./db');
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const imports = [];
|
||||
traverse.default(ast, {
|
||||
CallExpression(path) {
|
||||
if (path.node.callee.name === 'require') {
|
||||
const arg = path.node.arguments[0];
|
||||
if (arg?.type === 'StringLiteral') {
|
||||
imports.push(arg.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assert.ok(imports.includes('express'));
|
||||
assert.ok(imports.includes('./db'));
|
||||
});
|
||||
|
||||
test('detects ES module imports', () => {
|
||||
const code = `
|
||||
import express from 'express';
|
||||
import { Router, Request, Response } from 'express';
|
||||
import * as fs from 'fs';
|
||||
import db from './db.js';
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
const imports = [];
|
||||
traverse.default(ast, {
|
||||
ImportDeclaration(path) {
|
||||
imports.push(path.node.source.value);
|
||||
}
|
||||
});
|
||||
|
||||
assert.ok(imports.includes('express'));
|
||||
assert.ok(imports.includes('fs'));
|
||||
assert.ok(imports.includes('./db.js'));
|
||||
});
|
||||
|
||||
test('detects ES module exports', () => {
|
||||
const code = `
|
||||
export function publicFn() {}
|
||||
export const publicConst = 42;
|
||||
export default class MainClass {}
|
||||
export { helper, utils };
|
||||
`;
|
||||
const ast = parseCode(code);
|
||||
|
||||
let exportCount = 0;
|
||||
traverse.default(ast, {
|
||||
ExportNamedDeclaration() { exportCount++; },
|
||||
ExportDefaultDeclaration() { exportCount++; }
|
||||
});
|
||||
|
||||
assert.ok(exportCount >= 3);
|
||||
});
|
||||
});
243
devops/tools/callgraph/node/package-lock.json
generated
Normal file
@@ -0,0 +1,243 @@
{
|
||||
"name": "stella-callgraph-node",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "stella-callgraph-node",
|
||||
"version": "1.0.0",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"dependencies": {
|
||||
"@babel/parser": "^7.23.0",
|
||||
"@babel/traverse": "^7.23.0",
|
||||
"@babel/types": "^7.23.0"
|
||||
},
|
||||
"bin": {
|
||||
"stella-callgraph-node": "index.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/code-frame": {
|
||||
"version": "7.27.1",
|
||||
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
|
||||
"integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/helper-validator-identifier": "^7.27.1",
|
||||
"js-tokens": "^4.0.0",
|
||||
"picocolors": "^1.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/generator": {
|
||||
"version": "7.28.5",
|
||||
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz",
|
||||
"integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/parser": "^7.28.5",
|
||||
"@babel/types": "^7.28.5",
|
||||
"@jridgewell/gen-mapping": "^0.3.12",
|
||||
"@jridgewell/trace-mapping": "^0.3.28",
|
||||
"jsesc": "^3.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/helper-globals": {
|
||||
"version": "7.28.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
|
||||
"integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/helper-string-parser": {
|
||||
"version": "7.27.1",
|
||||
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
|
||||
"integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/helper-validator-identifier": {
|
||||
"version": "7.28.5",
|
||||
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
|
||||
"integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/parser": {
|
||||
"version": "7.28.5",
|
||||
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz",
|
||||
"integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/types": "^7.28.5"
|
||||
},
|
||||
"bin": {
|
||||
"parser": "bin/babel-parser.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/template": {
|
||||
"version": "7.27.2",
|
||||
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
|
||||
"integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/code-frame": "^7.27.1",
|
||||
"@babel/parser": "^7.27.2",
|
||||
"@babel/types": "^7.27.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/traverse": {
|
||||
"version": "7.28.5",
|
||||
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz",
|
||||
"integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/code-frame": "^7.27.1",
|
||||
"@babel/generator": "^7.28.5",
|
||||
"@babel/helper-globals": "^7.28.0",
|
||||
"@babel/parser": "^7.28.5",
|
||||
"@babel/template": "^7.27.2",
|
||||
"@babel/types": "^7.28.5",
|
||||
"debug": "^4.3.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/types": {
|
||||
"version": "7.28.5",
|
||||
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz",
|
||||
"integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/helper-string-parser": "^7.27.1",
|
||||
"@babel/helper-validator-identifier": "^7.28.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/gen-mapping": {
|
||||
"version": "0.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
|
||||
"integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/sourcemap-codec": "^1.5.0",
|
||||
"@jridgewell/trace-mapping": "^0.3.24"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/resolve-uri": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
|
||||
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/sourcemap-codec": {
|
||||
"version": "1.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
|
||||
"integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@jridgewell/trace-mapping": {
|
||||
"version": "0.3.31",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
|
||||
"integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/resolve-uri": "^3.1.0",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.19.27",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.27.tgz",
|
||||
"integrity": "sha512-N2clP5pJhB2YnZJ3PIHFk5RkygRX5WO/5f0WC08tp0wd+sv0rsJk3MqWn3CbNmT2J505a5336jaQj4ph1AdMug==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"undici-types": "~6.21.0"
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.4.3",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
|
||||
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/js-tokens": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
|
||||
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/jsesc": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
|
||||
"integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"jsesc": "bin/jsesc"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/picocolors": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
||||
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "6.21.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
|
||||
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
}
|
||||
}
|
||||
}
33
devops/tools/callgraph/node/package.json
Normal file
@@ -0,0 +1,33 @@
{
  "name": "stella-callgraph-node",
  "version": "1.0.0",
  "description": "Call graph extraction tool for JavaScript/TypeScript using Babel AST",
  "main": "index.js",
  "type": "module",
  "bin": {
    "stella-callgraph-node": "./index.js"
  },
  "scripts": {
    "start": "node index.js",
    "test": "node --test"
  },
  "keywords": [
    "callgraph",
    "ast",
    "babel",
    "static-analysis",
    "security"
  ],
  "license": "AGPL-3.0-or-later",
  "dependencies": {
    "@babel/parser": "^7.23.0",
    "@babel/traverse": "^7.23.0",
    "@babel/types": "^7.23.0"
  },
  "devDependencies": {
    "@types/node": "^20.0.0"
  },
  "engines": {
    "node": ">=18.0.0"
  }
}
230
devops/tools/callgraph/node/sink-detect.js
Normal file
@@ -0,0 +1,230 @@
// -----------------------------------------------------------------------------
|
||||
// sink-detect.js
|
||||
// Security sink detection patterns for JavaScript/TypeScript.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Sink detection patterns organized by category.
|
||||
*/
|
||||
export const sinkPatterns = {
|
||||
command_injection: {
|
||||
category: 'command_injection',
|
||||
patterns: [
|
||||
{ module: 'child_process', methods: ['exec', 'execSync', 'spawn', 'spawnSync', 'execFile', 'execFileSync', 'fork'] },
|
||||
{ module: 'shelljs', methods: ['exec', 'which', 'cat', 'sed', 'grep', 'rm', 'cp', 'mv', 'mkdir'] },
|
||||
{ object: 'process', methods: ['exec'] }
|
||||
]
|
||||
},
|
||||
|
||||
sql_injection: {
|
||||
category: 'sql_injection',
|
||||
patterns: [
|
||||
{ object: 'connection', methods: ['query', 'execute'] },
|
||||
{ object: 'pool', methods: ['query', 'execute'] },
|
||||
{ object: 'client', methods: ['query'] },
|
||||
{ module: 'mysql', methods: ['query', 'execute'] },
|
||||
{ module: 'mysql2', methods: ['query', 'execute'] },
|
||||
{ module: 'pg', methods: ['query'] },
|
||||
{ module: 'sqlite3', methods: ['run', 'exec', 'all', 'get'] },
|
||||
{ module: 'knex', methods: ['raw', 'whereRaw', 'havingRaw', 'orderByRaw'] },
|
||||
{ module: 'sequelize', methods: ['query', 'literal'] },
|
||||
{ module: 'typeorm', methods: ['query', 'createQueryBuilder'] },
|
||||
{ module: 'prisma', methods: ['$queryRaw', '$executeRaw', '$queryRawUnsafe', '$executeRawUnsafe'] }
|
||||
]
|
||||
},
|
||||
|
||||
file_write: {
|
||||
category: 'file_write',
|
||||
patterns: [
|
||||
{ module: 'fs', methods: ['writeFile', 'writeFileSync', 'appendFile', 'appendFileSync', 'createWriteStream', 'rename', 'renameSync', 'unlink', 'unlinkSync', 'rmdir', 'rmdirSync', 'rm', 'rmSync'] },
|
||||
{ module: 'fs/promises', methods: ['writeFile', 'appendFile', 'rename', 'unlink', 'rmdir', 'rm'] }
|
||||
]
|
||||
},
|
||||
|
||||
file_read: {
|
||||
category: 'file_read',
|
||||
patterns: [
|
||||
{ module: 'fs', methods: ['readFile', 'readFileSync', 'createReadStream', 'readdir', 'readdirSync'] },
|
||||
{ module: 'fs/promises', methods: ['readFile', 'readdir'] }
|
||||
]
|
||||
},
|
||||
|
||||
deserialization: {
|
||||
category: 'deserialization',
|
||||
patterns: [
|
||||
{ global: true, methods: ['eval', 'Function'] },
|
||||
{ object: 'JSON', methods: ['parse'] },
|
||||
{ module: 'vm', methods: ['runInContext', 'runInNewContext', 'runInThisContext', 'createScript'] },
|
||||
{ module: 'serialize-javascript', methods: ['deserialize'] },
|
||||
{ module: 'node-serialize', methods: ['unserialize'] },
|
||||
{ module: 'js-yaml', methods: ['load', 'loadAll'] }
|
||||
]
|
||||
},
|
||||
|
||||
ssrf: {
|
||||
category: 'ssrf',
|
||||
patterns: [
|
||||
{ module: 'http', methods: ['request', 'get'] },
|
||||
{ module: 'https', methods: ['request', 'get'] },
|
||||
{ module: 'axios', methods: ['get', 'post', 'put', 'delete', 'patch', 'request'] },
|
||||
{ module: 'node-fetch', methods: ['default'] },
|
||||
{ global: true, methods: ['fetch'] },
|
||||
{ module: 'got', methods: ['get', 'post', 'put', 'delete', 'patch'] },
|
||||
{ module: 'superagent', methods: ['get', 'post', 'put', 'delete', 'patch'] },
|
||||
{ module: 'request', methods: ['get', 'post', 'put', 'delete', 'patch'] },
|
||||
{ module: 'undici', methods: ['request', 'fetch'] }
|
||||
]
|
||||
},
|
||||
|
||||
path_traversal: {
|
||||
category: 'path_traversal',
|
||||
patterns: [
|
||||
{ module: 'path', methods: ['join', 'resolve', 'normalize'] },
|
||||
{ module: 'fs', methods: ['readFile', 'readFileSync', 'writeFile', 'writeFileSync', 'access', 'accessSync', 'stat', 'statSync'] }
|
||||
]
|
||||
},
|
||||
|
||||
weak_crypto: {
|
||||
category: 'weak_crypto',
|
||||
patterns: [
|
||||
{ module: 'crypto', methods: ['createCipher', 'createDecipher', 'createCipheriv', 'createDecipheriv'] },
|
||||
{ object: 'crypto', methods: ['createHash'] } // MD5, SHA1 are weak
|
||||
]
|
||||
},
|
||||
|
||||
ldap_injection: {
|
||||
category: 'ldap_injection',
|
||||
patterns: [
|
||||
{ module: 'ldapjs', methods: ['search', 'modify', 'add', 'del'] },
|
||||
{ module: 'activedirectory', methods: ['find', 'findUser', 'findGroup'] }
|
||||
]
|
||||
},
|
||||
|
||||
nosql_injection: {
|
||||
category: 'nosql_injection',
|
||||
patterns: [
|
||||
{ module: 'mongodb', methods: ['find', 'findOne', 'updateOne', 'updateMany', 'deleteOne', 'deleteMany', 'aggregate'] },
|
||||
{ module: 'mongoose', methods: ['find', 'findOne', 'findById', 'updateOne', 'updateMany', 'deleteOne', 'deleteMany', 'where', 'aggregate'] }
|
||||
]
|
||||
},
|
||||
|
||||
xss: {
|
||||
category: 'xss',
|
||||
patterns: [
|
||||
{ object: 'document', methods: ['write', 'writeln'] },
|
||||
{ object: 'element', methods: ['innerHTML', 'outerHTML'] },
|
||||
{ module: 'dangerouslySetInnerHTML', methods: ['__html'] } // React pattern
|
||||
]
|
||||
},
|
||||
|
||||
log_injection: {
|
||||
category: 'log_injection',
|
||||
patterns: [
|
||||
{ object: 'console', methods: ['log', 'info', 'warn', 'error', 'debug'] },
|
||||
{ module: 'winston', methods: ['log', 'info', 'warn', 'error', 'debug'] },
|
||||
{ module: 'pino', methods: ['info', 'warn', 'error', 'debug', 'trace'] },
|
||||
{ module: 'bunyan', methods: ['info', 'warn', 'error', 'debug', 'trace'] }
|
||||
]
|
||||
},
|
||||
|
||||
regex_dos: {
|
||||
category: 'regex_dos',
|
||||
patterns: [
|
||||
{ object: 'RegExp', methods: ['test', 'exec', 'match'] },
|
||||
{ global: true, methods: ['RegExp'] }
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Build a lookup map for fast sink detection.
|
||||
* @returns {Map<string, { category: string, method: string }>}
|
||||
*/
|
||||
export function buildSinkLookup() {
|
||||
const lookup = new Map();
|
||||
|
||||
for (const config of Object.values(sinkPatterns)) {
|
||||
for (const pattern of config.patterns) {
|
||||
for (const method of pattern.methods) {
|
||||
// Key formats: "module:method", "object.method", "global:method"
|
||||
if (pattern.module) {
|
||||
lookup.set(`${pattern.module}:${method}`, { category: config.category, method });
|
||||
}
|
||||
if (pattern.object) {
|
||||
lookup.set(`${pattern.object}.${method}`, { category: config.category, method });
|
||||
}
|
||||
if (pattern.global) {
|
||||
lookup.set(`global:${method}`, { category: config.category, method });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return lookup;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a call expression is a security sink.
|
||||
* @param {string} objectOrModule - The object/module name (e.g., 'fs', 'child_process', 'connection')
|
||||
* @param {string} methodName - The method being called
|
||||
* @param {Map} sinkLookup - Pre-built sink lookup map
|
||||
* @returns {{ category: string, method: string } | null}
|
||||
*/
|
||||
export function matchSink(objectOrModule, methodName, sinkLookup) {
|
||||
// Check module:method pattern
|
||||
const moduleKey = `${objectOrModule}:${methodName}`;
|
||||
if (sinkLookup.has(moduleKey)) {
|
||||
return sinkLookup.get(moduleKey);
|
||||
}
|
||||
|
||||
// Check object.method pattern
|
||||
const objectKey = `${objectOrModule}.${methodName}`;
|
||||
if (sinkLookup.has(objectKey)) {
|
||||
return sinkLookup.get(objectKey);
|
||||
}
|
||||
|
||||
// Check global functions
|
||||
const globalKey = `global:${objectOrModule}`;
|
||||
if (sinkLookup.has(globalKey)) {
|
||||
return sinkLookup.get(globalKey);
|
||||
}
|
||||
|
||||
// Check if methodName itself is a global sink (like eval)
|
||||
const directGlobal = `global:${methodName}`;
|
||||
if (sinkLookup.has(directGlobal)) {
|
||||
return sinkLookup.get(directGlobal);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Common dangerous patterns that indicate direct user input flow.
|
||||
*/
|
||||
export const taintSources = [
|
||||
'req.body',
|
||||
'req.query',
|
||||
'req.params',
|
||||
'req.headers',
|
||||
'req.cookies',
|
||||
'request.body',
|
||||
'request.query',
|
||||
'request.params',
|
||||
'event.body',
|
||||
'event.queryStringParameters',
|
||||
'event.pathParameters',
|
||||
'ctx.request.body',
|
||||
'ctx.request.query',
|
||||
'ctx.params',
|
||||
'process.env',
|
||||
'process.argv'
|
||||
];
|
||||
|
||||
/**
|
||||
* Check if an identifier is a potential taint source.
|
||||
* @param {string} identifier
|
||||
* @returns {boolean}
|
||||
*/
|
||||
export function isTaintSource(identifier) {
|
||||
return taintSources.some(source => identifier.includes(source));
|
||||
}
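A brief usage sketch of the exported helpers above; the expected results mirror the unit tests that follow, and the module/identifier strings passed in are illustrative.

// Illustrative usage of the sink-detection exports (results match the tests below).
import { buildSinkLookup, matchSink, isTaintSource } from './sink-detect.js';

const lookup = buildSinkLookup();

// child_process.exec(...) resolves to a command_injection sink.
console.log(matchSink('child_process', 'exec', lookup));
// -> { category: 'command_injection', method: 'exec' }

// Calls with no matching pattern return null.
console.log(matchSink('myCustomModule', 'doSomething', lookup)); // -> null

// Identifiers that embed a known taint source are flagged.
console.log(isTaintSource('req.body.username')); // -> true
console.log(isTaintSource('config.port'));       // -> false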
236
devops/tools/callgraph/node/sink-detect.test.js
Normal file
@@ -0,0 +1,236 @@
// -----------------------------------------------------------------------------
|
||||
// sink-detect.test.js
|
||||
// Sprint: SPRINT_3600_0004_0001 (Node.js Babel Integration)
|
||||
// Tasks: NODE-019 - Unit tests for sink detection (all categories)
|
||||
// Description: Tests for security sink detection patterns.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
import { test, describe } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { buildSinkLookup, matchSink, sinkPatterns, isTaintSource } from './sink-detect.js';
|
||||
|
||||
describe('buildSinkLookup', () => {
|
||||
test('builds lookup map with all patterns', () => {
|
||||
const lookup = buildSinkLookup();
|
||||
assert.ok(lookup instanceof Map);
|
||||
assert.ok(lookup.size > 0);
|
||||
});
|
||||
|
||||
test('includes command injection sinks', () => {
|
||||
const lookup = buildSinkLookup();
|
||||
assert.ok(lookup.has('child_process:exec'));
|
||||
assert.ok(lookup.has('child_process:spawn'));
|
||||
assert.ok(lookup.has('child_process:execSync'));
|
||||
});
|
||||
|
||||
test('includes SQL injection sinks', () => {
|
||||
const lookup = buildSinkLookup();
|
||||
assert.ok(lookup.has('connection.query'));
|
||||
assert.ok(lookup.has('mysql:query'));
|
||||
assert.ok(lookup.has('pg:query'));
|
||||
assert.ok(lookup.has('knex:raw'));
|
||||
});
|
||||
|
||||
test('includes file write sinks', () => {
|
||||
const lookup = buildSinkLookup();
|
||||
assert.ok(lookup.has('fs:writeFile'));
|
||||
assert.ok(lookup.has('fs:writeFileSync'));
|
||||
assert.ok(lookup.has('fs:appendFile'));
|
||||
});
|
||||
|
||||
test('includes deserialization sinks', () => {
|
||||
const lookup = buildSinkLookup();
|
||||
assert.ok(lookup.has('global:eval'));
|
||||
assert.ok(lookup.has('global:Function'));
|
||||
assert.ok(lookup.has('vm:runInContext'));
|
||||
});
|
||||
|
||||
test('includes SSRF sinks', () => {
|
||||
const lookup = buildSinkLookup();
|
||||
assert.ok(lookup.has('http:request'));
|
||||
assert.ok(lookup.has('https:get'));
|
||||
assert.ok(lookup.has('axios:get'));
|
||||
assert.ok(lookup.has('global:fetch'));
|
||||
});
|
||||
|
||||
test('includes NoSQL injection sinks', () => {
|
||||
const lookup = buildSinkLookup();
|
||||
assert.ok(lookup.has('mongodb:find'));
|
||||
assert.ok(lookup.has('mongoose:findOne'));
|
||||
assert.ok(lookup.has('mongodb:aggregate'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('matchSink', () => {
|
||||
const lookup = buildSinkLookup();
|
||||
|
||||
test('detects command injection via child_process.exec', () => {
|
||||
const result = matchSink('child_process', 'exec', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'command_injection');
|
||||
assert.equal(result.method, 'exec');
|
||||
});
|
||||
|
||||
test('detects command injection via child_process.spawn', () => {
|
||||
const result = matchSink('child_process', 'spawn', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'command_injection');
|
||||
});
|
||||
|
||||
test('detects SQL injection via connection.query', () => {
|
||||
const result = matchSink('connection', 'query', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'sql_injection');
|
||||
});
|
||||
|
||||
test('detects SQL injection via knex.raw', () => {
|
||||
const result = matchSink('knex', 'raw', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'sql_injection');
|
||||
});
|
||||
|
||||
test('detects SQL injection via prisma.$queryRaw', () => {
|
||||
const result = matchSink('prisma', '$queryRaw', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'sql_injection');
|
||||
});
|
||||
|
||||
test('detects file write via fs.writeFile', () => {
|
||||
const result = matchSink('fs', 'writeFile', lookup);
|
||||
assert.ok(result);
|
||||
// fs.writeFile is categorized in both file_write and path_traversal
|
||||
// The lookup returns path_traversal since it's processed later
|
||||
assert.ok(['file_write', 'path_traversal'].includes(result.category));
|
||||
});
|
||||
|
||||
test('detects deserialization via eval', () => {
|
||||
const result = matchSink('eval', 'eval', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'deserialization');
|
||||
});
|
||||
|
||||
test('detects SSRF via axios.get', () => {
|
||||
const result = matchSink('axios', 'get', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'ssrf');
|
||||
});
|
||||
|
||||
test('detects SSRF via fetch', () => {
|
||||
const result = matchSink('fetch', 'fetch', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'ssrf');
|
||||
});
|
||||
|
||||
test('detects NoSQL injection via mongoose.find', () => {
|
||||
const result = matchSink('mongoose', 'find', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'nosql_injection');
|
||||
});
|
||||
|
||||
test('detects weak crypto via crypto.createCipher', () => {
|
||||
const result = matchSink('crypto', 'createCipher', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'weak_crypto');
|
||||
});
|
||||
|
||||
test('detects LDAP injection via ldapjs.search', () => {
|
||||
const result = matchSink('ldapjs', 'search', lookup);
|
||||
assert.ok(result);
|
||||
assert.equal(result.category, 'ldap_injection');
|
||||
});
|
||||
|
||||
test('returns null for non-sink methods', () => {
|
||||
const result = matchSink('console', 'clear', lookup);
|
||||
assert.equal(result, null);
|
||||
});
|
||||
|
||||
test('returns null for unknown objects', () => {
|
||||
const result = matchSink('myCustomModule', 'doSomething', lookup);
|
||||
assert.equal(result, null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sinkPatterns', () => {
|
||||
test('has expected categories', () => {
|
||||
const categories = Object.keys(sinkPatterns);
|
||||
assert.ok(categories.includes('command_injection'));
|
||||
assert.ok(categories.includes('sql_injection'));
|
||||
assert.ok(categories.includes('file_write'));
|
||||
assert.ok(categories.includes('deserialization'));
|
||||
assert.ok(categories.includes('ssrf'));
|
||||
assert.ok(categories.includes('nosql_injection'));
|
||||
assert.ok(categories.includes('xss'));
|
||||
assert.ok(categories.includes('log_injection'));
|
||||
});
|
||||
|
||||
test('command_injection has child_process patterns', () => {
|
||||
const cmdPatterns = sinkPatterns.command_injection.patterns;
|
||||
const childProcessPattern = cmdPatterns.find(p => p.module === 'child_process');
|
||||
assert.ok(childProcessPattern);
|
||||
assert.ok(childProcessPattern.methods.includes('exec'));
|
||||
assert.ok(childProcessPattern.methods.includes('spawn'));
|
||||
assert.ok(childProcessPattern.methods.includes('fork'));
|
||||
});
|
||||
|
||||
test('sql_injection covers major ORMs', () => {
|
||||
const sqlPatterns = sinkPatterns.sql_injection.patterns;
|
||||
const modules = sqlPatterns.map(p => p.module).filter(Boolean);
|
||||
assert.ok(modules.includes('mysql'));
|
||||
assert.ok(modules.includes('pg'));
|
||||
assert.ok(modules.includes('knex'));
|
||||
assert.ok(modules.includes('sequelize'));
|
||||
assert.ok(modules.includes('prisma'));
|
||||
});
|
||||
|
||||
test('ssrf covers HTTP clients', () => {
|
||||
const ssrfPatterns = sinkPatterns.ssrf.patterns;
|
||||
const modules = ssrfPatterns.map(p => p.module).filter(Boolean);
|
||||
assert.ok(modules.includes('http'));
|
||||
assert.ok(modules.includes('https'));
|
||||
assert.ok(modules.includes('axios'));
|
||||
assert.ok(modules.includes('got'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('isTaintSource', () => {
|
||||
test('detects req.body as taint source', () => {
|
||||
assert.ok(isTaintSource('req.body'));
|
||||
assert.ok(isTaintSource('req.body.username'));
|
||||
});
|
||||
|
||||
test('detects req.query as taint source', () => {
|
||||
assert.ok(isTaintSource('req.query'));
|
||||
assert.ok(isTaintSource('req.query.id'));
|
||||
});
|
||||
|
||||
test('detects req.params as taint source', () => {
|
||||
assert.ok(isTaintSource('req.params'));
|
||||
assert.ok(isTaintSource('req.params.userId'));
|
||||
});
|
||||
|
||||
test('detects req.headers as taint source', () => {
|
||||
assert.ok(isTaintSource('req.headers'));
|
||||
assert.ok(isTaintSource('req.headers.authorization'));
|
||||
});
|
||||
|
||||
test('detects event.body (Lambda) as taint source', () => {
|
||||
assert.ok(isTaintSource('event.body'));
|
||||
assert.ok(isTaintSource('event.queryStringParameters'));
|
||||
});
|
||||
|
||||
test('detects ctx.request.body (Koa) as taint source', () => {
|
||||
assert.ok(isTaintSource('ctx.request.body'));
|
||||
assert.ok(isTaintSource('ctx.params'));
|
||||
});
|
||||
|
||||
test('detects process.env as taint source', () => {
|
||||
assert.ok(isTaintSource('process.env'));
|
||||
assert.ok(isTaintSource('process.env.SECRET'));
|
||||
});
|
||||
|
||||
test('does not flag safe identifiers', () => {
|
||||
assert.ok(!isTaintSource('myLocalVariable'));
|
||||
assert.ok(!isTaintSource('config.port'));
|
||||
assert.ok(!isTaintSource('user.name'));
|
||||
});
|
||||
});
168
devops/tools/callgraph/python/__main__.py
Normal file
@@ -0,0 +1,168 @@
#!/usr/bin/env python3
|
||||
"""
|
||||
stella-callgraph-python
|
||||
Call graph extraction tool for Python projects using AST analysis.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import ast
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from ast_analyzer import PythonASTAnalyzer
|
||||
from framework_detect import detect_frameworks
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Extract call graphs from Python projects"
|
||||
)
|
||||
parser.add_argument(
|
||||
"path",
|
||||
help="Path to Python project or file"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--json",
|
||||
action="store_true",
|
||||
help="Output formatted JSON"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose",
|
||||
"-v",
|
||||
action="store_true",
|
||||
help="Verbose output"
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
result = analyze_project(Path(args.path), verbose=args.verbose)
|
||||
|
||||
if args.json:
|
||||
print(json.dumps(result, indent=2))
|
||||
else:
|
||||
print(json.dumps(result))
|
||||
|
||||
return 0
|
||||
except Exception as e:
|
||||
print(f"Error: {e}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
|
||||
def analyze_project(project_path: Path, verbose: bool = False) -> dict[str, Any]:
|
||||
"""Analyze a Python project and extract its call graph."""
|
||||
|
||||
if not project_path.exists():
|
||||
raise FileNotFoundError(f"Path not found: {project_path}")
|
||||
|
||||
# Find project root (look for pyproject.toml, setup.py, etc.)
|
||||
root = find_project_root(project_path)
|
||||
package_name = extract_package_name(root)
|
||||
|
||||
# Detect frameworks
|
||||
frameworks = detect_frameworks(root)
|
||||
|
||||
# Find Python source files
|
||||
source_files = find_python_files(root)
|
||||
|
||||
if verbose:
|
||||
print(f"Found {len(source_files)} Python files", file=sys.stderr)
|
||||
|
||||
# Analyze all files
|
||||
analyzer = PythonASTAnalyzer(package_name, root, frameworks)
|
||||
|
||||
for source_file in source_files:
|
||||
try:
|
||||
with open(source_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
|
||||
tree = ast.parse(content, filename=str(source_file))
|
||||
relative_path = source_file.relative_to(root)
|
||||
analyzer.analyze_file(tree, str(relative_path))
|
||||
|
||||
except SyntaxError as e:
|
||||
if verbose:
|
||||
print(f"Warning: Syntax error in {source_file}: {e}", file=sys.stderr)
|
||||
except Exception as e:
|
||||
if verbose:
|
||||
print(f"Warning: Failed to parse {source_file}: {e}", file=sys.stderr)
|
||||
|
||||
return analyzer.get_result()
|
||||
|
||||
|
||||
def find_project_root(path: Path) -> Path:
|
||||
"""Find the project root by looking for marker files."""
|
||||
markers = ['pyproject.toml', 'setup.py', 'setup.cfg', 'requirements.txt', '.git']
|
||||
|
||||
current = path.resolve()
|
||||
if current.is_file():
|
||||
current = current.parent
|
||||
|
||||
while current != current.parent:
|
||||
for marker in markers:
|
||||
if (current / marker).exists():
|
||||
return current
|
||||
current = current.parent
|
||||
|
||||
return path.resolve() if path.is_dir() else path.parent.resolve()
|
||||
|
||||
|
||||
def extract_package_name(root: Path) -> str:
|
||||
"""Extract package name from project metadata."""
|
||||
|
||||
# Try pyproject.toml
|
||||
pyproject = root / 'pyproject.toml'
|
||||
if pyproject.exists():
|
||||
try:
|
||||
import tomllib
|
||||
with open(pyproject, 'rb') as f:
|
||||
data = tomllib.load(f)
|
||||
return data.get('project', {}).get('name', root.name)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Try setup.py
|
||||
setup_py = root / 'setup.py'
|
||||
if setup_py.exists():
|
||||
try:
|
||||
with open(setup_py, 'r') as f:
|
||||
content = f.read()
|
||||
# Simple regex-based extraction
|
||||
import re
|
||||
match = re.search(r"name\s*=\s*['\"]([^'\"]+)['\"]", content)
|
||||
if match:
|
||||
return match.group(1)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return root.name
|
||||
|
||||
|
||||
def find_python_files(root: Path) -> list[Path]:
|
||||
"""Find all Python source files in the project."""
|
||||
exclude_dirs = {
|
||||
'__pycache__', '.git', '.tox', '.nox', '.mypy_cache',
|
||||
'.pytest_cache', 'venv', '.venv', 'env', '.env',
|
||||
'node_modules', 'dist', 'build', 'eggs', '*.egg-info'
|
||||
}
|
||||
|
||||
files = []
|
||||
|
||||
for path in root.rglob('*.py'):
|
||||
# Skip excluded directories
|
||||
skip = False
|
||||
for part in path.parts:
|
||||
if part in exclude_dirs or part.endswith('.egg-info'):
|
||||
skip = True
|
||||
break
|
||||
|
||||
if not skip and not path.name.startswith('.'):
|
||||
files.append(path)
|
||||
|
||||
return sorted(files)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
322
devops/tools/callgraph/python/ast_analyzer.py
Normal file
@@ -0,0 +1,322 @@
"""
|
||||
AST analyzer for Python call graph extraction.
|
||||
"""
|
||||
|
||||
import ast
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
@dataclass
|
||||
class FunctionNode:
|
||||
"""Represents a function in the call graph."""
|
||||
id: str
|
||||
package: str
|
||||
name: str
|
||||
qualified_name: str
|
||||
file: str
|
||||
line: int
|
||||
visibility: str
|
||||
annotations: list[str] = field(default_factory=list)
|
||||
is_entrypoint: bool = False
|
||||
entrypoint_type: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class CallEdge:
|
||||
"""Represents a call between functions."""
|
||||
from_id: str
|
||||
to_id: str
|
||||
kind: str
|
||||
file: str
|
||||
line: int
|
||||
|
||||
|
||||
@dataclass
|
||||
class Entrypoint:
|
||||
"""Represents a detected entrypoint."""
|
||||
id: str
|
||||
type: str
|
||||
route: Optional[str] = None
|
||||
method: Optional[str] = None
|
||||
|
||||
|
||||
class PythonASTAnalyzer:
|
||||
"""Analyzes Python AST to extract call graph information."""
|
||||
|
||||
def __init__(self, package_name: str, root: Path, frameworks: list[str]):
|
||||
self.package_name = package_name
|
||||
self.root = root
|
||||
self.frameworks = frameworks
|
||||
self.nodes: dict[str, FunctionNode] = {}
|
||||
self.edges: list[CallEdge] = []
|
||||
self.entrypoints: list[Entrypoint] = []
|
||||
self.current_function: Optional[str] = None
|
||||
self.current_file: str = ""
|
||||
self.current_class: Optional[str] = None
|
||||
|
||||
def analyze_file(self, tree: ast.AST, relative_path: str) -> None:
|
||||
"""Analyze a single Python file."""
|
||||
self.current_file = relative_path
|
||||
self.current_function = None
|
||||
self.current_class = None
|
||||
|
||||
visitor = FunctionVisitor(self)
|
||||
visitor.visit(tree)
|
||||
|
||||
def get_result(self) -> dict[str, Any]:
|
||||
"""Get the analysis result as a dictionary."""
|
||||
return {
|
||||
"module": self.package_name,
|
||||
"nodes": [self._node_to_dict(n) for n in self.nodes.values()],
|
||||
"edges": [self._edge_to_dict(e) for e in self._dedupe_edges()],
|
||||
"entrypoints": [self._entrypoint_to_dict(e) for e in self.entrypoints]
|
||||
}
|
||||
|
||||
def _node_to_dict(self, node: FunctionNode) -> dict[str, Any]:
|
||||
return {
|
||||
"id": node.id,
|
||||
"package": node.package,
|
||||
"name": node.name,
|
||||
"signature": node.qualified_name,
|
||||
"position": {
|
||||
"file": node.file,
|
||||
"line": node.line,
|
||||
"column": 0
|
||||
},
|
||||
"visibility": node.visibility,
|
||||
"annotations": node.annotations
|
||||
}
|
||||
|
||||
def _edge_to_dict(self, edge: CallEdge) -> dict[str, Any]:
|
||||
return {
|
||||
"from": edge.from_id,
|
||||
"to": edge.to_id,
|
||||
"kind": edge.kind,
|
||||
"site": {
|
||||
"file": edge.file,
|
||||
"line": edge.line
|
||||
}
|
||||
}
|
||||
|
||||
def _entrypoint_to_dict(self, ep: Entrypoint) -> dict[str, Any]:
|
||||
result: dict[str, Any] = {
|
||||
"id": ep.id,
|
||||
"type": ep.type
|
||||
}
|
||||
if ep.route:
|
||||
result["route"] = ep.route
|
||||
if ep.method:
|
||||
result["method"] = ep.method
|
||||
return result
|
||||
|
||||
def _dedupe_edges(self) -> list[CallEdge]:
|
||||
seen: set[str] = set()
|
||||
result: list[CallEdge] = []
|
||||
for edge in self.edges:
|
||||
key = f"{edge.from_id}|{edge.to_id}"
|
||||
if key not in seen:
|
||||
seen.add(key)
|
||||
result.append(edge)
|
||||
return result
|
||||
|
||||
def make_symbol_id(self, name: str, class_name: Optional[str] = None) -> str:
|
||||
"""Create a symbol ID for a function or method."""
|
||||
module_base = self.current_file.replace('.py', '').replace('/', '.').replace('\\', '.')
|
||||
|
||||
if class_name:
|
||||
return f"py:{self.package_name}/{module_base}.{class_name}.{name}"
|
||||
return f"py:{self.package_name}/{module_base}.{name}"
|
||||
|
||||
def add_function(
|
||||
self,
|
||||
name: str,
|
||||
line: int,
|
||||
decorators: list[str],
|
||||
class_name: Optional[str] = None,
|
||||
is_private: bool = False
|
||||
) -> str:
|
||||
"""Add a function node to the graph."""
|
||||
symbol_id = self.make_symbol_id(name, class_name)
|
||||
|
||||
qualified_name = f"{class_name}.{name}" if class_name else name
|
||||
visibility = "private" if is_private or name.startswith('_') else "public"
|
||||
|
||||
node = FunctionNode(
|
||||
id=symbol_id,
|
||||
package=self.package_name,
|
||||
name=name,
|
||||
qualified_name=qualified_name,
|
||||
file=self.current_file,
|
||||
line=line,
|
||||
visibility=visibility,
|
||||
annotations=decorators
|
||||
)
|
||||
|
||||
self.nodes[symbol_id] = node
|
||||
|
||||
# Detect entrypoints
|
||||
entrypoint = self._detect_entrypoint(name, decorators, class_name)
|
||||
if entrypoint:
|
||||
node.is_entrypoint = True
|
||||
node.entrypoint_type = entrypoint.type
|
||||
self.entrypoints.append(entrypoint)
|
||||
|
||||
return symbol_id
|
||||
|
||||
def add_call(self, target_name: str, line: int) -> None:
|
||||
"""Add a call edge from the current function."""
|
||||
if not self.current_function:
|
||||
return
|
||||
|
||||
# Try to resolve the target
|
||||
target_id = self._resolve_target(target_name)
|
||||
|
||||
self.edges.append(CallEdge(
|
||||
from_id=self.current_function,
|
||||
to_id=target_id,
|
||||
kind="direct",
|
||||
file=self.current_file,
|
||||
line=line
|
||||
))
|
||||
|
||||
def _resolve_target(self, name: str) -> str:
|
||||
"""Resolve a call target to a symbol ID."""
|
||||
# Check if it's a known local function
|
||||
for node_id, node in self.nodes.items():
|
||||
if node.name == name or node.qualified_name == name:
|
||||
return node_id
|
||||
|
||||
# External or unresolved
|
||||
return f"py:external/{name}"
|
||||
|
||||
def _detect_entrypoint(
|
||||
self,
|
||||
name: str,
|
||||
decorators: list[str],
|
||||
class_name: Optional[str]
|
||||
) -> Optional[Entrypoint]:
|
||||
"""Detect if a function is an entrypoint based on frameworks and decorators."""
|
||||
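# Examples of decorator strings this heuristic classifies (hypothetical handlers):
#   app.route('/items')   -> http_handler (Flask-style)
#   router.post('/items') -> http_handler (FastAPI-style)
#   shared_task()         -> background_job (Celery)
#   cli.command()         -> cli_command (Click)
# A top-level main() without a class is also treated as a cli_command.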
symbol_id = self.make_symbol_id(name, class_name)
|
||||
|
||||
for decorator in decorators:
|
||||
# Flask routes
|
||||
if 'route' in decorator.lower() or decorator.lower() in ['get', 'post', 'put', 'delete', 'patch']:
|
||||
route = self._extract_route_from_decorator(decorator)
|
||||
method = self._extract_method_from_decorator(decorator)
|
||||
return Entrypoint(id=symbol_id, type="http_handler", route=route, method=method)
|
||||
|
||||
# FastAPI routes
|
||||
if decorator.lower() in ['get', 'post', 'put', 'delete', 'patch', 'api_route']:
|
||||
route = self._extract_route_from_decorator(decorator)
|
||||
return Entrypoint(id=symbol_id, type="http_handler", route=route, method=decorator.upper())
|
||||
|
||||
# Celery tasks
|
||||
if 'task' in decorator.lower() or 'shared_task' in decorator.lower():
|
||||
return Entrypoint(id=symbol_id, type="background_job")
|
||||
|
||||
# Click commands
|
||||
if 'command' in decorator.lower() or 'group' in decorator.lower():
|
||||
return Entrypoint(id=symbol_id, type="cli_command")
|
||||
|
||||
# Django views (class-based)
|
||||
if class_name and class_name.endswith('View'):
|
||||
if name in ['get', 'post', 'put', 'delete', 'patch']:
|
||||
return Entrypoint(id=symbol_id, type="http_handler", method=name.upper())
|
||||
|
||||
# main() function
|
||||
if name == 'main' and not class_name:
|
||||
return Entrypoint(id=symbol_id, type="cli_command")
|
||||
|
||||
return None
|
||||
|
||||
def _extract_route_from_decorator(self, decorator: str) -> Optional[str]:
|
||||
"""Extract route path from decorator string."""
|
||||
import re
|
||||
match = re.search(r"['\"]([/\w{}<>:.-]+)['\"]", decorator)
|
||||
return match.group(1) if match else None
|
||||
|
||||
def _extract_method_from_decorator(self, decorator: str) -> Optional[str]:
|
||||
"""Extract HTTP method from decorator string."""
|
||||
import re
|
||||
methods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS']
|
||||
for method in methods:
|
||||
if method.lower() in decorator.lower():
|
||||
return method
|
||||
match = re.search(r"methods\s*=\s*\[([^\]]+)\]", decorator)
|
||||
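# Note: the capture group is the full bracket contents, so methods=["GET", "POST"]
# yields a combined string rather than a single verb; single-method lists are assumed.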
if match:
|
||||
return match.group(1).strip("'\"").upper()
|
||||
return None
|
||||
|
||||
|
||||
class FunctionVisitor(ast.NodeVisitor):
|
||||
"""AST visitor that extracts function definitions and calls."""
|
||||
|
||||
def __init__(self, analyzer: PythonASTAnalyzer):
|
||||
self.analyzer = analyzer
|
||||
|
||||
def visit_ClassDef(self, node: ast.ClassDef) -> None:
|
||||
"""Visit class definitions."""
|
||||
old_class = self.analyzer.current_class
|
||||
self.analyzer.current_class = node.name
|
||||
|
||||
self.generic_visit(node)
|
||||
|
||||
self.analyzer.current_class = old_class
|
||||
|
||||
def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
|
||||
"""Visit function definitions."""
|
||||
self._visit_function(node)
|
||||
|
||||
def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None:
|
||||
"""Visit async function definitions."""
|
||||
self._visit_function(node)
|
||||
|
||||
def _visit_function(self, node: ast.FunctionDef | ast.AsyncFunctionDef) -> None:
|
||||
"""Common logic for function and async function definitions."""
|
||||
decorators = [ast.unparse(d) for d in node.decorator_list]
|
||||
is_private = node.name.startswith('_') and not node.name.startswith('__')
|
||||
|
||||
symbol_id = self.analyzer.add_function(
|
||||
name=node.name,
|
||||
line=node.lineno,
|
||||
decorators=decorators,
|
||||
class_name=self.analyzer.current_class,
|
||||
is_private=is_private
|
||||
)
|
||||
|
||||
# Visit function body for calls
|
||||
old_function = self.analyzer.current_function
|
||||
self.analyzer.current_function = symbol_id
|
||||
|
||||
for child in ast.walk(node):
|
||||
if isinstance(child, ast.Call):
|
||||
target_name = self._get_call_target(child)
|
||||
if target_name:
|
||||
self.analyzer.add_call(target_name, child.lineno)
|
||||
|
||||
self.analyzer.current_function = old_function
|
||||
|
||||
def _get_call_target(self, node: ast.Call) -> Optional[str]:
|
||||
"""Extract the target name from a Call node."""
|
||||
if isinstance(node.func, ast.Name):
|
||||
return node.func.id
|
||||
elif isinstance(node.func, ast.Attribute):
|
||||
parts = self._get_attribute_parts(node.func)
|
||||
return '.'.join(parts)
|
||||
return None
|
||||
|
||||
def _get_attribute_parts(self, node: ast.Attribute) -> list[str]:
|
||||
"""Get all parts of an attribute chain."""
|
||||
parts: list[str] = []
|
||||
current: ast.expr = node
|
||||
|
||||
while isinstance(current, ast.Attribute):
|
||||
parts.insert(0, current.attr)
|
||||
current = current.value
|
||||
|
||||
if isinstance(current, ast.Name):
|
||||
parts.insert(0, current.id)
|
||||
|
||||
return parts
|
||||
250
devops/tools/callgraph/python/framework_detect.py
Normal file
@@ -0,0 +1,250 @@
|
||||
"""
|
||||
Framework detection for Python projects.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
import re
|
||||
|
||||
|
||||
# Framework patterns
|
||||
FRAMEWORK_PATTERNS = {
|
||||
"flask": {
|
||||
"packages": ["flask"],
|
||||
"imports": [r"from flask import", r"import flask"],
|
||||
"patterns": [r"@\w+\.route\(", r"Flask\(__name__\)"],
|
||||
"entrypoint_type": "http_handler"
|
||||
},
|
||||
"fastapi": {
|
||||
"packages": ["fastapi"],
|
||||
"imports": [r"from fastapi import", r"import fastapi"],
|
||||
"patterns": [r"@\w+\.(get|post|put|delete|patch)\(", r"FastAPI\("],
|
||||
"entrypoint_type": "http_handler"
|
||||
},
|
||||
"django": {
|
||||
"packages": ["django"],
|
||||
"imports": [r"from django", r"import django"],
|
||||
"patterns": [r"urlpatterns\s*=", r"class \w+View\(", r"@api_view\("],
|
||||
"entrypoint_type": "http_handler"
|
||||
},
|
||||
"click": {
|
||||
"packages": ["click"],
|
||||
"imports": [r"from click import", r"import click"],
|
||||
"patterns": [r"@click\.command\(", r"@click\.group\(", r"@\w+\.command\("],
|
||||
"entrypoint_type": "cli_command"
|
||||
},
|
||||
"typer": {
|
||||
"packages": ["typer"],
|
||||
"imports": [r"from typer import", r"import typer"],
|
||||
"patterns": [r"typer\.Typer\(", r"@\w+\.command\("],
|
||||
"entrypoint_type": "cli_command"
|
||||
},
|
||||
"celery": {
|
||||
"packages": ["celery"],
|
||||
"imports": [r"from celery import", r"import celery"],
|
||||
"patterns": [r"@\w+\.task\(", r"@shared_task\(", r"Celery\("],
|
||||
"entrypoint_type": "background_job"
|
||||
},
|
||||
"dramatiq": {
|
||||
"packages": ["dramatiq"],
|
||||
"imports": [r"from dramatiq import", r"import dramatiq"],
|
||||
"patterns": [r"@dramatiq\.actor\("],
|
||||
"entrypoint_type": "background_job"
|
||||
},
|
||||
"rq": {
|
||||
"packages": ["rq"],
|
||||
"imports": [r"from rq import", r"import rq"],
|
||||
"patterns": [r"@job\(", r"queue\.enqueue\("],
|
||||
"entrypoint_type": "background_job"
|
||||
},
|
||||
"sanic": {
|
||||
"packages": ["sanic"],
|
||||
"imports": [r"from sanic import", r"import sanic"],
|
||||
"patterns": [r"@\w+\.route\(", r"Sanic\("],
|
||||
"entrypoint_type": "http_handler"
|
||||
},
|
||||
"aiohttp": {
|
||||
"packages": ["aiohttp"],
|
||||
"imports": [r"from aiohttp import", r"import aiohttp"],
|
||||
"patterns": [r"web\.Application\(", r"@routes\.(get|post|put|delete)\("],
|
||||
"entrypoint_type": "http_handler"
|
||||
},
|
||||
"tornado": {
|
||||
"packages": ["tornado"],
|
||||
"imports": [r"from tornado import", r"import tornado"],
|
||||
"patterns": [r"class \w+Handler\(", r"tornado\.web\.Application\("],
|
||||
"entrypoint_type": "http_handler"
|
||||
},
|
||||
"aws_lambda": {
|
||||
"packages": ["aws_lambda_powertools", "boto3"],
|
||||
"imports": [r"def handler\(event", r"def lambda_handler\("],
|
||||
"patterns": [r"def handler\(event,\s*context\)", r"@logger\.inject_lambda_context"],
|
||||
"entrypoint_type": "lambda"
|
||||
},
|
||||
"azure_functions": {
|
||||
"packages": ["azure.functions"],
|
||||
"imports": [r"import azure\.functions"],
|
||||
"patterns": [r"@func\.route\(", r"func\.HttpRequest"],
|
||||
"entrypoint_type": "cloud_function"
|
||||
},
|
||||
"grpc": {
|
||||
"packages": ["grpcio", "grpc"],
|
||||
"imports": [r"import grpc", r"from grpc import"],
|
||||
"patterns": [r"_pb2_grpc\.add_\w+Servicer_to_server\("],
|
||||
"entrypoint_type": "grpc_method"
|
||||
},
|
||||
"graphql": {
|
||||
"packages": ["graphene", "strawberry", "ariadne"],
|
||||
"imports": [r"import graphene", r"import strawberry", r"import ariadne"],
|
||||
"patterns": [r"@strawberry\.(type|mutation|query)\(", r"class \w+\(graphene\.ObjectType\)"],
|
||||
"entrypoint_type": "graphql_resolver"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def detect_frameworks(project_root: Path) -> list[str]:
|
||||
"""Detect frameworks used in a Python project."""
|
||||
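# Illustrative result (hypothetical project): a repo whose pyproject.toml lists
# fastapi and celery resolves to ["celery", "fastapi"] (sorted output).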
detected: set[str] = set()
|
||||
|
||||
# Check pyproject.toml
|
||||
pyproject = project_root / "pyproject.toml"
|
||||
if pyproject.exists():
|
||||
detected.update(_detect_from_pyproject(pyproject))
|
||||
|
||||
# Check requirements.txt
|
||||
requirements = project_root / "requirements.txt"
|
||||
if requirements.exists():
|
||||
detected.update(_detect_from_requirements(requirements))
|
||||
|
||||
# Check setup.py
|
||||
setup_py = project_root / "setup.py"
|
||||
if setup_py.exists():
|
||||
detected.update(_detect_from_setup_py(setup_py))
|
||||
|
||||
# Scan source files for import patterns
|
||||
detected.update(_detect_from_source(project_root))
|
||||
|
||||
return sorted(detected)
|
||||
|
||||
|
||||
def _detect_from_pyproject(path: Path) -> set[str]:
|
||||
"""Detect frameworks from pyproject.toml."""
|
||||
detected: set[str] = set()
|
||||
|
||||
try:
|
||||
import tomllib
|
||||
with open(path, 'rb') as f:
|
||||
data = tomllib.load(f)
|
||||
|
||||
# Check dependencies
|
||||
deps = set()
|
||||
deps.update(data.get("project", {}).get("dependencies", []))
|
||||
deps.update(data.get("project", {}).get("optional-dependencies", {}).get("dev", []))
|
||||
|
||||
# Poetry format
|
||||
poetry = data.get("tool", {}).get("poetry", {})
|
||||
deps.update(poetry.get("dependencies", {}).keys())
|
||||
deps.update(poetry.get("dev-dependencies", {}).keys())
|
||||
|
||||
for dep in deps:
|
||||
# Extract package name (remove version specifier)
|
||||
pkg = re.split(r'[<>=!~\[]', dep)[0].strip().lower()
|
||||
for framework, config in FRAMEWORK_PATTERNS.items():
|
||||
if pkg in config["packages"]:
|
||||
detected.add(framework)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return detected
|
||||
|
||||
|
||||
def _detect_from_requirements(path: Path) -> set[str]:
|
||||
"""Detect frameworks from requirements.txt."""
|
||||
detected: set[str] = set()
|
||||
|
||||
try:
|
||||
with open(path, 'r') as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
if not line or line.startswith('#'):
|
||||
continue
|
||||
|
||||
# Extract package name
|
||||
pkg = re.split(r'[<>=!~\[]', line)[0].strip().lower()
|
||||
for framework, config in FRAMEWORK_PATTERNS.items():
|
||||
if pkg in config["packages"]:
|
||||
detected.add(framework)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return detected
|
||||
|
||||
|
||||
def _detect_from_setup_py(path: Path) -> set[str]:
|
||||
"""Detect frameworks from setup.py."""
|
||||
detected: set[str] = set()
|
||||
|
||||
try:
|
||||
with open(path, 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
# Look for install_requires
|
||||
for framework, config in FRAMEWORK_PATTERNS.items():
|
||||
for pkg in config["packages"]:
|
||||
if f'"{pkg}"' in content or f"'{pkg}'" in content:
|
||||
detected.add(framework)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return detected
|
||||
|
||||
|
||||
def _detect_from_source(project_root: Path) -> set[str]:
|
||||
"""Detect frameworks by scanning Python source files."""
|
||||
detected: set[str] = set()
|
||||
|
||||
exclude_dirs = {
|
||||
'__pycache__', '.git', '.tox', '.nox', 'venv', '.venv', 'env', '.env',
|
||||
'node_modules', 'dist', 'build'
|
||||
}
|
||||
|
||||
# Only scan first few files to avoid slow startup
|
||||
max_files = 50
|
||||
scanned = 0
|
||||
|
||||
for py_file in project_root.rglob('*.py'):
|
||||
if scanned >= max_files:
|
||||
break
|
||||
|
||||
# Skip excluded directories
|
||||
skip = False
|
||||
for part in py_file.parts:
|
||||
if part in exclude_dirs:
|
||||
skip = True
|
||||
break
|
||||
if skip:
|
||||
continue
|
||||
|
||||
try:
|
||||
with open(py_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read(4096) # Only read first 4KB
|
||||
|
||||
for framework, config in FRAMEWORK_PATTERNS.items():
|
||||
if framework in detected:
|
||||
continue
|
||||
|
||||
for pattern in config["imports"] + config["patterns"]:
|
||||
if re.search(pattern, content):
|
||||
detected.add(framework)
|
||||
break
|
||||
|
||||
scanned += 1
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
return detected
|
||||
|
||||
|
||||
def get_entrypoint_type(framework: str) -> str:
|
||||
"""Get the entrypoint type for a framework."""
|
||||
return FRAMEWORK_PATTERNS.get(framework, {}).get("entrypoint_type", "unknown")
|
||||
2
devops/tools/callgraph/python/requirements.txt
Normal file
@@ -0,0 +1,2 @@
# stella-callgraph-python requirements
# No external dependencies - uses Python 3.11+ stdlib only
130
devops/tools/check-channel-alignment.py
Normal file
@@ -0,0 +1,130 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Ensure deployment bundles reference the images defined in a release manifest.
|
||||
|
||||
Usage:
|
||||
./devops/tools/check-channel-alignment.py \
|
||||
--release deploy/releases/2025.10-edge.yaml \
|
||||
--target deploy/helm/stellaops/values-dev.yaml \
|
||||
--target deploy/compose/docker-compose.dev.yaml
|
||||
|
||||
For every target file, the script scans `image:` declarations and verifies that
|
||||
any image belonging to a repository listed in the release manifest matches the
|
||||
exact digest or tag recorded there. Images outside of the manifest (for example,
|
||||
supporting services such as `nats`) are ignored.
|
||||
"""
|
||||
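# Illustrative mismatch (hypothetical images): if the release manifest pins
#   image: registry.example/stellaops/web@sha256:aaa
# while a target profile declares
#   image: registry.example/stellaops/web@sha256:bbb
# the script reports "<target>: <image> does not match release value <expected>"
# and exits non-zero; images whose repository is absent from the manifest are skipped.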
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
from typing import Dict, Iterable, List, Optional, Set
|
||||
|
||||
IMAGE_LINE = re.compile(r"^\s*image:\s*['\"]?(?P<image>\S+)['\"]?\s*$")
|
||||
|
||||
|
||||
def extract_images(path: pathlib.Path) -> List[str]:
|
||||
images: List[str] = []
|
||||
for line in path.read_text(encoding="utf-8").splitlines():
|
||||
match = IMAGE_LINE.match(line)
|
||||
if match:
|
||||
images.append(match.group("image"))
|
||||
return images
|
||||
|
||||
|
||||
def image_repo(image: str) -> str:
|
||||
if "@" in image:
|
||||
return image.split("@", 1)[0]
|
||||
# Split on the last colon to preserve registries with ports (e.g. localhost:5000)
|
||||
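# Illustrative inputs (hypothetical):
#   "registry.local/app@sha256:abc" -> "registry.local/app"
#   "registry.local/app:1.2.3"      -> "registry.local/app"
#   "localhost:5000/app"            -> "localhost:5000/app" (port, no tag)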
if ":" in image:
|
||||
prefix, tag = image.rsplit(":", 1)
|
||||
if "/" in tag:
|
||||
# the last colon is part of a registry host:port, not a tag separator (unlikely here)
|
||||
return image
|
||||
return prefix
|
||||
return image
|
||||
|
||||
|
||||
def load_release_map(release_path: pathlib.Path) -> Dict[str, str]:
|
||||
release_map: Dict[str, str] = {}
|
||||
for image in extract_images(release_path):
|
||||
repo = image_repo(image)
|
||||
release_map[repo] = image
|
||||
return release_map
|
||||
|
||||
|
||||
def check_target(
|
||||
target_path: pathlib.Path,
|
||||
release_map: Dict[str, str],
|
||||
ignore_repos: Set[str],
|
||||
) -> List[str]:
|
||||
errors: List[str] = []
|
||||
for image in extract_images(target_path):
|
||||
repo = image_repo(image)
|
||||
if repo in ignore_repos:
|
||||
continue
|
||||
if repo not in release_map:
|
||||
continue
|
||||
expected = release_map[repo]
|
||||
if image != expected:
|
||||
errors.append(
|
||||
f"{target_path}: {image} does not match release value {expected}"
|
||||
)
|
||||
return errors
|
||||
|
||||
|
||||
def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument(
|
||||
"--release",
|
||||
required=True,
|
||||
type=pathlib.Path,
|
||||
help="Path to the release manifest (YAML)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--target",
|
||||
action="append",
|
||||
required=True,
|
||||
type=pathlib.Path,
|
||||
help="Deployment profile to validate against the release manifest",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--ignore-repo",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Repository prefix to ignore (may be repeated)",
|
||||
)
|
||||
return parser.parse_args(argv)
|
||||
|
||||
|
||||
def main(argv: Optional[Iterable[str]] = None) -> int:
|
||||
args = parse_args(argv)
|
||||
|
||||
release_map = load_release_map(args.release)
|
||||
ignore_repos = {repo.rstrip("/") for repo in args.ignore_repo}
|
||||
|
||||
if not release_map:
|
||||
print(f"error: no images found in release manifest {args.release}", file=sys.stderr)
|
||||
return 2
|
||||
|
||||
total_errors: List[str] = []
|
||||
for target in args.target:
|
||||
if not target.exists():
|
||||
total_errors.append(f"{target}: file not found")
|
||||
continue
|
||||
total_errors.extend(check_target(target, release_map, ignore_repos))
|
||||
|
||||
if total_errors:
|
||||
print("✖ channel alignment check failed:", file=sys.stderr)
|
||||
for err in total_errors:
|
||||
print(f" - {err}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
print("✓ deployment profiles reference release images for the inspected repositories.")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
46
devops/tools/ci/run-concelier-attestation-tests.sh
Normal file
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
ROOT_DIR="$(cd "$(dirname "$0")/../.." && pwd)"
|
||||
cd "$ROOT_DIR"
|
||||
|
||||
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
|
||||
export DOTNET_CLI_TELEMETRY_OPTOUT=1
|
||||
export DOTNET_NOLOGO=1
|
||||
|
||||
# Restore once for the Concelier solution.
|
||||
dotnet restore src/Concelier/StellaOps.Concelier.sln
|
||||
|
||||
# Build the two test projects with analyzers disabled to keep CI fast.
|
||||
dotnet build src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj \
|
||||
-c Release -p:DisableAnalyzers=true
|
||||
|
||||
dotnet build src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj \
|
||||
-c Release -p:DisableAnalyzers=true
|
||||
|
||||
# Run filtered attestation tests; keep logs in TestResults.
|
||||
RESULTS=TestResults/concelier-attestation
|
||||
mkdir -p "$RESULTS"
|
||||
|
||||
core_log="$RESULTS/core.trx"
|
||||
web_log="$RESULTS/web.trx"
|
||||
|
||||
set +e
|
||||
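# errexit is disabled here so both test projects run even if the first fails;
# the captured exit codes are evaluated after set -e is restored below.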
|
||||
dotnet test src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj \
|
||||
-c Release --no-build --filter EvidenceBundleAttestationBuilderTests \
|
||||
--logger "trx;LogFileName=$(basename "$core_log")" --results-directory "$RESULTS"
|
||||
CORE_EXIT=$?
|
||||
|
||||
dotnet test src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj \
|
||||
-c Release --no-build --filter InternalAttestationVerify \
|
||||
--logger "trx;LogFileName=$(basename "$web_log")" --results-directory "$RESULTS"
|
||||
WEB_EXIT=$?
|
||||
|
||||
set -e
|
||||
|
||||
if [[ $CORE_EXIT -ne 0 || $WEB_EXIT -ne 0 ]]; then
|
||||
echo "Attestation test run failed: core=$CORE_EXIT web=$WEB_EXIT" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Attestation tests succeeded; results in $RESULTS"
|
||||
26
devops/tools/commit-prep-artifacts.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Helper to stage and commit the prep/doc updates once disk/PTY issues are resolved.
|
||||
# Usage: ./devops/tools/commit-prep-artifacts.sh "Your commit message"
|
||||
|
||||
root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||
cd "$root"
|
||||
|
||||
git add \
|
||||
docs/modules/policy/prep/2025-11-20-policy-airgap-prep.md \
|
||||
docs/modules/policy/prep/2025-11-20-policy-aoc-prep.md \
|
||||
docs/modules/policy/prep/2025-11-20-policy-attest-prep.md \
|
||||
docs/modules/policy/prep/2025-11-21-policy-metrics-29-004-prep.md \
|
||||
docs/modules/policy/prep/2025-11-21-policy-path-scope-29-002-prep.md \
|
||||
docs/modules/scanner/prep/2025-11-21-scanner-records-prep.md \
|
||||
docs/samples/prep/2025-11-20-lnm-22-001-prep.md \
|
||||
docs/implplan/SPRINT_0123_0001_0001_policy_reasoning.md \
|
||||
docs/implplan/SPRINT_0125_0001_0001_policy_reasoning.md \
|
||||
docs/implplan/SPRINT_0131_0001_0001_scanner_surface.md
|
||||
|
||||
git status --short
|
||||
|
||||
msg="${1:-Start prep on policy path/scope, metrics/logging, and scanner record payloads}"
|
||||
git commit -m "$msg"
|
||||
87
devops/tools/concelier/backfill-store-aoc-19-005.sh
Normal file
@@ -0,0 +1,87 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Postgres backfill runner for STORE-AOC-19-005-DEV (Link-Not-Merge raw linksets/chunks)
|
||||
# Usage:
|
||||
# PGURI=postgres://.../concelier ./devops/tools/concelier/backfill-store-aoc-19-005.sh /path/to/linksets-stage-backfill.tar.zst
|
||||
# Optional:
|
||||
# PGSCHEMA=lnm_raw (default), DRY_RUN=1 to stop after extraction
|
||||
#
|
||||
# Assumptions:
|
||||
# - Dataset contains ndjson files: linksets.ndjson, advisory_chunks.ndjson, manifest.json
|
||||
# - Target staging tables are created by this script if absent:
|
||||
# <schema>.linksets_raw(id text primary key, raw jsonb)
|
||||
# <schema>.advisory_chunks_raw(id text primary key, raw jsonb)
|
||||
|
||||
DATASET_PATH="${1:-}"
|
||||
if [[ -z "${DATASET_PATH}" || ! -f "${DATASET_PATH}" ]]; then
|
||||
echo "Dataset tarball not found. Provide path to linksets-stage-backfill.tar.zst" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
PGURI="${PGURI:-${CONCELIER_PG_URI:-}}"
|
||||
PGSCHEMA="${PGSCHEMA:-lnm_raw}"
|
||||
DRY_RUN="${DRY_RUN:-0}"
|
||||
|
||||
if [[ -z "${PGURI}" ]]; then
|
||||
echo "PGURI (or CONCELIER_PG_URI) must be set" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
WORKDIR="$(mktemp -d)"
|
||||
cleanup() { rm -rf "${WORKDIR}"; }
|
||||
trap cleanup EXIT
|
||||
|
||||
echo "==> Dataset: ${DATASET_PATH}"
|
||||
sha256sum "${DATASET_PATH}"
|
||||
|
||||
echo "==> Extracting to ${WORKDIR}"
|
||||
tar -xf "${DATASET_PATH}" -C "${WORKDIR}"
|
||||
|
||||
for required in linksets.ndjson advisory_chunks.ndjson manifest.json; do
|
||||
if [[ ! -f "${WORKDIR}/${required}" ]]; then
|
||||
echo "Missing required file in dataset: ${required}" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
echo "==> Ensuring staging schema/tables exist in Postgres"
|
||||
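# Note: PGSCHEMA is interpolated directly into the SQL below, so it must be a trusted identifier.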
psql "${PGURI}" <<SQL
|
||||
create schema if not exists ${PGSCHEMA};
|
||||
create table if not exists ${PGSCHEMA}.linksets_raw (
|
||||
id text primary key,
|
||||
raw jsonb not null
|
||||
);
|
||||
create table if not exists ${PGSCHEMA}.advisory_chunks_raw (
|
||||
id text primary key,
|
||||
raw jsonb not null
|
||||
);
|
||||
SQL
|
||||
|
||||
if [[ "${DRY_RUN}" != "0" ]]; then
|
||||
echo "DRY_RUN=1 set; extraction and schema verification completed, skipping import."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "==> Importing linksets into ${PGSCHEMA}.linksets_raw"
|
||||
cat >"${WORKDIR}/linksets.tsv" <(jq -rc '[._id, .] | @tsv' "${WORKDIR}/linksets.ndjson")
|
||||
psql "${PGURI}" <<SQL
|
||||
TRUNCATE TABLE ${PGSCHEMA}.linksets_raw;
|
||||
\copy ${PGSCHEMA}.linksets_raw (id, raw) FROM '${WORKDIR}/linksets.tsv' WITH (FORMAT csv, DELIMITER E'\t', QUOTE '"', ESCAPE '"');
|
||||
SQL
|
||||
|
||||
echo "==> Importing advisory_chunks into ${PGSCHEMA}.advisory_chunks_raw"
|
||||
cat >"${WORKDIR}/advisory_chunks.tsv" <(jq -rc '[._id, .] | @tsv' "${WORKDIR}/advisory_chunks.ndjson")
|
||||
psql "${PGURI}" <<SQL
|
||||
TRUNCATE TABLE ${PGSCHEMA}.advisory_chunks_raw;
|
||||
\copy ${PGSCHEMA}.advisory_chunks_raw (id, raw) FROM '${WORKDIR}/advisory_chunks.tsv' WITH (FORMAT csv, DELIMITER E'\t', QUOTE '"', ESCAPE '"');
|
||||
SQL
|
||||
|
||||
echo "==> Post-import counts"
|
||||
psql -tA "${PGURI}" -c "select 'linksets_raw='||count(*) from ${PGSCHEMA}.linksets_raw;"
|
||||
psql -tA "${PGURI}" -c "select 'advisory_chunks_raw='||count(*) from ${PGSCHEMA}.advisory_chunks_raw;"
|
||||
|
||||
echo "==> Manifest summary"
|
||||
cat "${WORKDIR}/manifest.json"
|
||||
|
||||
echo "Backfill complete."
|
||||
74
devops/tools/concelier/build-store-aoc-19-005-dataset.sh
Normal file
@@ -0,0 +1,74 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Deterministic dataset builder for STORE-AOC-19-005-DEV.
|
||||
# Generates linksets-stage-backfill.tar.zst from repo seed data.
|
||||
# Usage:
|
||||
# ./devops/tools/concelier/build-store-aoc-19-005-dataset.sh [output_tarball]
|
||||
# Default output: out/linksets/linksets-stage-backfill.tar.zst
|
||||
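# Determinism check (illustrative; assumes the same zstd version for both runs):
#   ./devops/tools/concelier/build-store-aoc-19-005-dataset.sh /tmp/a.tar.zst
#   ./devops/tools/concelier/build-store-aoc-19-005-dataset.sh /tmp/b.tar.zst
#   diff <(cut -d' ' -f1 /tmp/a.tar.zst.sha256) <(cut -d' ' -f1 /tmp/b.tar.zst.sha256)
# The fixed mtime, ustar format, and numeric owner 0:0 below should make the two sums identical.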
|
||||
command -v tar >/dev/null || { echo "tar is required" >&2; exit 1; }
|
||||
command -v sha256sum >/dev/null || { echo "sha256sum is required" >&2; exit 1; }
|
||||
|
||||
TAR_COMPRESS=()
|
||||
if command -v zstd >/dev/null 2>&1; then
|
||||
TAR_COMPRESS=(--zstd)
|
||||
else
|
||||
echo "zstd not found; building uncompressed tarball (extension kept for compatibility)" >&2
|
||||
fi
|
||||
|
||||
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||
SEED_DIR="${ROOT_DIR}/seed-data/concelier/store-aoc-19-005"
|
||||
OUT_DIR="${ROOT_DIR}/out/linksets"
|
||||
OUT_PATH="${1:-${OUT_DIR}/linksets-stage-backfill.tar.zst}"
|
||||
GEN_TIME="2025-12-07T00:00:00Z"
|
||||
|
||||
for seed in linksets.ndjson advisory_chunks.ndjson; do
|
||||
if [[ ! -f "${SEED_DIR}/${seed}" ]]; then
|
||||
echo "Missing seed file: ${SEED_DIR}/${seed}" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
WORKDIR="$(mktemp -d)"
|
||||
cleanup() { rm -rf "${WORKDIR}"; }
|
||||
trap cleanup EXIT
|
||||
|
||||
cp "${SEED_DIR}/linksets.ndjson" "${WORKDIR}/linksets.ndjson"
|
||||
cp "${SEED_DIR}/advisory_chunks.ndjson" "${WORKDIR}/advisory_chunks.ndjson"
|
||||
|
||||
linksets_sha=$(sha256sum "${WORKDIR}/linksets.ndjson" | awk '{print $1}')
|
||||
advisory_sha=$(sha256sum "${WORKDIR}/advisory_chunks.ndjson" | awk '{print $1}')
|
||||
linksets_count=$(wc -l < "${WORKDIR}/linksets.ndjson" | tr -d '[:space:]')
|
||||
advisory_count=$(wc -l < "${WORKDIR}/advisory_chunks.ndjson" | tr -d '[:space:]')
|
||||
|
||||
cat >"${WORKDIR}/manifest.json" <<EOF
|
||||
{
|
||||
"datasetId": "store-aoc-19-005-dev",
|
||||
"generatedAt": "${GEN_TIME}",
|
||||
"source": "seed-data/concelier/store-aoc-19-005",
|
||||
"records": {
|
||||
"linksets": ${linksets_count},
|
||||
"advisory_chunks": ${advisory_count}
|
||||
},
|
||||
"sha256": {
|
||||
"linksets.ndjson": "${linksets_sha}",
|
||||
"advisory_chunks.ndjson": "${advisory_sha}"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
mkdir -p "${OUT_DIR}"
|
||||
|
||||
tar "${TAR_COMPRESS[@]}" \
|
||||
--format=ustar \
|
||||
--mtime='1970-01-01 00:00:00Z' \
|
||||
--owner=0 --group=0 --numeric-owner \
|
||||
-cf "${OUT_PATH}" \
|
||||
-C "${WORKDIR}" \
|
||||
linksets.ndjson advisory_chunks.ndjson manifest.json
|
||||
|
||||
sha256sum "${OUT_PATH}" > "${OUT_PATH}.sha256"
|
||||
|
||||
echo "Wrote ${OUT_PATH}"
|
||||
cat "${OUT_PATH}.sha256"
|
||||
55
devops/tools/concelier/export-linksets-tarball.sh
Normal file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Export Concelier linksets/advisory_chunks from Postgres to a tar.zst bundle.
|
||||
# Usage:
|
||||
# PGURI=postgres://user:pass@host:5432/db \
|
||||
# ./devops/tools/concelier/export-linksets-tarball.sh out/linksets/linksets-stage-backfill.tar.zst
|
||||
#
|
||||
# Optional env:
|
||||
# PGSCHEMA=public # schema that owns linksets/advisory_chunks
|
||||
# LINKSETS_TABLE=linksets # table name for linksets
|
||||
# CHUNKS_TABLE=advisory_chunks # table name for advisory chunks
|
||||
# TMPDIR=/tmp/export-linksets # working directory (defaults to mktemp)
|
||||
|
||||
TARGET="${1:-}"
|
||||
if [[ -z "${TARGET}" ]]; then
|
||||
echo "Usage: PGURI=... $0 out/linksets/linksets-stage-backfill.tar.zst" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ -z "${PGURI:-}" ]]; then
|
||||
echo "PGURI environment variable is required (postgres://...)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
PGSCHEMA="${PGSCHEMA:-public}"
|
||||
LINKSETS_TABLE="${LINKSETS_TABLE:-linksets}"
|
||||
CHUNKS_TABLE="${CHUNKS_TABLE:-advisory_chunks}"
|
||||
WORKDIR="${TMPDIR:-$(mktemp -d)}"
|
||||
|
||||
mkdir -p "${WORKDIR}"
|
||||
OUTDIR="$(dirname "${TARGET}")"
|
||||
mkdir -p "${OUTDIR}"
|
||||
|
||||
echo "==> Exporting linksets from ${PGSCHEMA}.${LINKSETS_TABLE}"
|
||||
psql "${PGURI}" -c "\copy (select row_to_json(t) from ${PGSCHEMA}.${LINKSETS_TABLE} t) to '${WORKDIR}/linksets.ndjson'"
|
||||
|
||||
echo "==> Exporting advisory_chunks from ${PGSCHEMA}.${CHUNKS_TABLE}"
|
||||
psql "${PGURI}" -c "\copy (select row_to_json(t) from ${PGSCHEMA}.${CHUNKS_TABLE} t) to '${WORKDIR}/advisory_chunks.ndjson'"
|
||||
|
||||
LINKSETS_COUNT="$(wc -l < "${WORKDIR}/linksets.ndjson")"
|
||||
CHUNKS_COUNT="$(wc -l < "${WORKDIR}/advisory_chunks.ndjson")"
|
||||
|
||||
echo "==> Writing manifest.json"
|
||||
jq -n --argjson linksets "${LINKSETS_COUNT}" --argjson advisory_chunks "${CHUNKS_COUNT}" \
|
||||
'{linksets: $linksets, advisory_chunks: $advisory_chunks}' \
|
||||
> "${WORKDIR}/manifest.json"
|
||||
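# Note: this manifest only records row counts; it differs from the seed-dataset
# manifest (records/sha256 keys) that test-store-aoc-19-005-dataset.sh validates,
# so that check only applies to tarballs built by build-store-aoc-19-005-dataset.sh.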
|
||||
echo "==> Building tarball ${TARGET}"
|
||||
tar -I "zstd -19" -cf "${TARGET}" -C "${WORKDIR}" linksets.ndjson advisory_chunks.ndjson manifest.json
|
||||
|
||||
echo "==> SHA-256"
|
||||
sha256sum "${TARGET}"
|
||||
|
||||
echo "Done. Workdir: ${WORKDIR}"
|
||||
90
devops/tools/concelier/test-store-aoc-19-005-dataset.sh
Normal file
@@ -0,0 +1,90 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Validates the store-aoc-19-005 dataset tarball.
|
||||
# Usage: ./devops/tools/concelier/test-store-aoc-19-005-dataset.sh [tarball]
|
||||
|
||||
command -v tar >/dev/null || { echo "tar is required" >&2; exit 1; }
|
||||
command -v sha256sum >/dev/null || { echo "sha256sum is required" >&2; exit 1; }
|
||||
command -v python >/dev/null || { echo "python is required" >&2; exit 1; }
|
||||
|
||||
DATASET="${1:-out/linksets/linksets-stage-backfill.tar.zst}"
|
||||
|
||||
if [[ ! -f "${DATASET}" ]]; then
|
||||
echo "Dataset not found: ${DATASET}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
WORKDIR="$(mktemp -d)"
|
||||
cleanup() { rm -rf "${WORKDIR}"; }
|
||||
trap cleanup EXIT
|
||||
|
||||
tar -xf "${DATASET}" -C "${WORKDIR}"
|
||||
|
||||
for required in linksets.ndjson advisory_chunks.ndjson manifest.json; do
|
||||
if [[ ! -f "${WORKDIR}/${required}" ]]; then
|
||||
echo "Missing ${required} in dataset" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
manifest="${WORKDIR}/manifest.json"
|
||||
expected_linksets=$(python - <<'PY' "${manifest}"
|
||||
import json, sys
|
||||
with open(sys.argv[1], "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(data["records"]["linksets"])
|
||||
PY
|
||||
)
|
||||
expected_chunks=$(python - <<'PY' "${manifest}"
|
||||
import json, sys
|
||||
with open(sys.argv[1], "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(data["records"]["advisory_chunks"])
|
||||
PY
|
||||
)
|
||||
expected_linksets_sha=$(python - <<'PY' "${manifest}"
|
||||
import json, sys
|
||||
with open(sys.argv[1], "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(data["sha256"]["linksets.ndjson"])
|
||||
PY
|
||||
)
|
||||
expected_chunks_sha=$(python - <<'PY' "${manifest}"
|
||||
import json, sys
|
||||
with open(sys.argv[1], "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(data["sha256"]["advisory_chunks.ndjson"])
|
||||
PY
|
||||
)
|
||||
|
||||
actual_linksets=$(wc -l < "${WORKDIR}/linksets.ndjson" | tr -d '[:space:]')
|
||||
actual_chunks=$(wc -l < "${WORKDIR}/advisory_chunks.ndjson" | tr -d '[:space:]')
|
||||
actual_linksets_sha=$(sha256sum "${WORKDIR}/linksets.ndjson" | awk '{print $1}')
|
||||
actual_chunks_sha=$(sha256sum "${WORKDIR}/advisory_chunks.ndjson" | awk '{print $1}')
|
||||
|
||||
if [[ "${expected_linksets}" != "${actual_linksets}" ]]; then
|
||||
echo "linksets count mismatch: expected ${expected_linksets}, got ${actual_linksets}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "${expected_chunks}" != "${actual_chunks}" ]]; then
|
||||
echo "advisory_chunks count mismatch: expected ${expected_chunks}, got ${actual_chunks}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "${expected_linksets_sha}" != "${actual_linksets_sha}" ]]; then
|
||||
echo "linksets sha mismatch: expected ${expected_linksets_sha}, got ${actual_linksets_sha}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "${expected_chunks_sha}" != "${actual_chunks_sha}" ]]; then
|
||||
echo "advisory_chunks sha mismatch: expected ${expected_chunks_sha}, got ${actual_chunks_sha}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Dataset validation succeeded:"
|
||||
echo " linksets: ${actual_linksets}"
|
||||
echo " advisory_chunks: ${actual_chunks}"
|
||||
echo " linksets.sha256=${actual_linksets_sha}"
|
||||
echo " advisory_chunks.sha256=${actual_chunks_sha}"
|
||||
57
devops/tools/corpus/add-case.py
Normal file
@@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Add a new corpus case from a template."""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
CORPUS = ROOT / "bench" / "golden-corpus" / "categories"
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--category", required=True)
|
||||
parser.add_argument("--name", required=True)
|
||||
args = parser.parse_args()
|
||||
|
||||
case_dir = CORPUS / args.category / args.name
|
||||
(case_dir / "input").mkdir(parents=True, exist_ok=True)
|
||||
(case_dir / "expected").mkdir(parents=True, exist_ok=True)
|
||||
|
||||
created_at = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
(case_dir / "case-manifest.json").write_text(
|
||||
'{\n'
|
||||
f' "id": "{args.name}",\n'
|
||||
f' "category": "{args.category}",\n'
|
||||
' "description": "New corpus case",\n'
|
||||
f' "createdAt": "{created_at}",\n'
|
||||
' "inputs": ["sbom-cyclonedx.json", "sbom-spdx.json", "image.tar.gz"],\n'
|
||||
' "expected": ["verdict.json", "evidence-index.json", "unknowns.json", "delta-verdict.json"]\n'
|
||||
'}\n',
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
for rel in [
|
||||
"input/sbom-cyclonedx.json",
|
||||
"input/sbom-spdx.json",
|
||||
"input/image.tar.gz",
|
||||
"expected/verdict.json",
|
||||
"expected/evidence-index.json",
|
||||
"expected/unknowns.json",
|
||||
"expected/delta-verdict.json",
|
||||
"run-manifest.json",
|
||||
]:
|
||||
target = case_dir / rel
|
||||
if target.suffix == ".gz":
|
||||
target.touch()
|
||||
else:
|
||||
target.write_text("{}\n", encoding="utf-8")
|
||||
|
||||
print(f"Created case at {case_dir}")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
48
devops/tools/corpus/check-determinism.py
Normal file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Check determinism by verifying manifest digests match stored values."""
|
||||
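# Each entry in corpus-manifest.json (as written by generate-manifest.py) has the shape:
#   {"id": "<case dir name>", "path": "bench/golden-corpus/categories/...",
#    "manifestDigest": "sha256:<hex digest of case-manifest.json>"}
# This script recomputes the digest per case and reports any drift.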
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
MANIFEST = ROOT / "bench" / "golden-corpus" / "corpus-manifest.json"
|
||||
|
||||
|
||||
def sha256(path: Path) -> str:
|
||||
h = hashlib.sha256()
|
||||
with path.open("rb") as fh:
|
||||
while True:
|
||||
chunk = fh.read(8192)
|
||||
if not chunk:
|
||||
break
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def main() -> int:
|
||||
if not MANIFEST.exists():
|
||||
print(f"Manifest not found: {MANIFEST}")
|
||||
return 1
|
||||
|
||||
data = json.loads(MANIFEST.read_text(encoding="utf-8"))
|
||||
mismatches = []
|
||||
for case in data.get("cases", []):
|
||||
path = ROOT / case["path"]
|
||||
manifest_path = path / "case-manifest.json"
|
||||
digest = f"sha256:{sha256(manifest_path)}"
|
||||
if digest != case.get("manifestDigest"):
|
||||
mismatches.append({"id": case.get("id"), "expected": case.get("manifestDigest"), "actual": digest})
|
||||
|
||||
if mismatches:
|
||||
print(json.dumps({"status": "fail", "mismatches": mismatches}, indent=2))
|
||||
return 1
|
||||
|
||||
print(json.dumps({"status": "ok", "checked": len(data.get("cases", []))}, indent=2))
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
47
devops/tools/corpus/generate-manifest.py
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Generate corpus-manifest.json from case directories."""
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
CORPUS = ROOT / "bench" / "golden-corpus" / "categories"
|
||||
OUTPUT = ROOT / "bench" / "golden-corpus" / "corpus-manifest.json"
|
||||
|
||||
|
||||
def sha256(path: Path) -> str:
|
||||
h = hashlib.sha256()
|
||||
with path.open("rb") as fh:
|
||||
while True:
|
||||
chunk = fh.read(8192)
|
||||
if not chunk:
|
||||
break
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def main() -> int:
|
||||
cases = []
|
||||
for case_dir in sorted([p for p in CORPUS.rglob("*") if p.is_dir() and (p / "case-manifest.json").exists()]):
|
||||
manifest_path = case_dir / "case-manifest.json"
|
||||
cases.append({
|
||||
"id": case_dir.name,
|
||||
"path": str(case_dir.relative_to(ROOT)).replace("\\", "/"),
|
||||
"manifestDigest": f"sha256:{sha256(manifest_path)}",
|
||||
})
|
||||
|
||||
payload = {
|
||||
"generatedAt": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"caseCount": len(cases),
|
||||
"cases": cases,
|
||||
}
|
||||
|
||||
OUTPUT.write_text(json.dumps(payload, indent=2) + "\n", encoding="utf-8")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
54
devops/tools/corpus/validate-corpus.py
Normal file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Validate golden corpus case structure."""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
CORPUS = ROOT / "bench" / "golden-corpus" / "categories"
|
||||
|
||||
REQUIRED = [
|
||||
"case-manifest.json",
|
||||
"run-manifest.json",
|
||||
"input/sbom-cyclonedx.json",
|
||||
"input/sbom-spdx.json",
|
||||
"input/image.tar.gz",
|
||||
"expected/verdict.json",
|
||||
"expected/evidence-index.json",
|
||||
"expected/unknowns.json",
|
||||
"expected/delta-verdict.json",
|
||||
]
|
||||
|
||||
|
||||
def validate_case(case_dir: Path) -> list[str]:
|
||||
missing = []
|
||||
for rel in REQUIRED:
|
||||
if not (case_dir / rel).exists():
|
||||
missing.append(rel)
|
||||
return missing
|
||||
|
||||
|
||||
def main() -> int:
|
||||
if not CORPUS.exists():
|
||||
print(f"Corpus path not found: {CORPUS}")
|
||||
return 1
|
||||
|
||||
errors = []
|
||||
cases = sorted([p for p in CORPUS.rglob("*") if p.is_dir() and (p / "case-manifest.json").exists()])
|
||||
for case in cases:
|
||||
missing = validate_case(case)
|
||||
if missing:
|
||||
errors.append({"case": str(case.relative_to(ROOT)), "missing": missing})
|
||||
|
||||
if errors:
|
||||
print(json.dumps({"status": "fail", "errors": errors}, indent=2))
|
||||
return 1
|
||||
|
||||
print(json.dumps({"status": "ok", "cases": len(cases)}, indent=2))
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
124
devops/tools/cosign/README.md
Normal file
@@ -0,0 +1,124 @@
|
||||
# Cosign binaries (runtime/signals signing)
|
||||
|
||||
## Preferred (system)
|
||||
- Version: `v3.0.2`
|
||||
- Path: `/usr/local/bin/cosign` (installed on WSL Debian host)
|
||||
- Breaking change: v3 requires `--bundle <file>` when signing blobs; older `--output-signature`/`--output-certificate` pairs are deprecated.
|
||||
|
||||
## Offline fallback (repo-pinned)
|
||||
- Version: `v2.6.0`
|
||||
- Binary: `tools/cosign/cosign` → `tools/cosign/v2.6.0/cosign-linux-amd64`
|
||||
- SHA256: `ea5c65f99425d6cfbb5c4b5de5dac035f14d09131c1a0ea7c7fc32eab39364f9`
|
||||
- Check: `cd tools/cosign/v2.6.0 && sha256sum -c cosign_checksums.txt --ignore-missing`
|
||||
|
||||
## Usage examples
|
||||
- v3 DSSE blob: `cosign sign-blob --key cosign.key --predicate-type stella.ops/confidenceDecayConfig@v1 --bundle confidence_decay_config.sigstore.json decay/confidence_decay_config.yaml`
|
||||
- v3 verify: `cosign verify-blob --bundle confidence_decay_config.sigstore.json decay/confidence_decay_config.yaml`
|
||||
- To force offline fallback, export `PATH=./tools/cosign:$PATH` (ensures v2.6.0 is used).
|
||||
|
||||
## CI Workflow: signals-dsse-sign.yml
|
||||
|
||||
The `.gitea/workflows/signals-dsse-sign.yml` workflow automates DSSE signing for Signals artifacts.
|
||||
|
||||
### Required Secrets
|
||||
| Secret | Description | Required |
|
||||
|--------|-------------|----------|
|
||||
| `COSIGN_PRIVATE_KEY_B64` | Base64-encoded cosign private key | Yes (for production) |
|
||||
| `COSIGN_PASSWORD` | Password for the private key | If key is encrypted |
|
||||
| `CI_EVIDENCE_LOCKER_TOKEN` | Token for Evidence Locker upload | Optional |
|
||||
|
||||
### Trigger Options
|
||||
1. **Automatic**: On push to `main` when signals artifacts change
|
||||
2. **Manual**: Via workflow_dispatch with options:
|
||||
- `out_dir`: Output directory (default: `evidence-locker/signals/2025-12-01`)
|
||||
- `allow_dev_key`: Set to `1` for testing with dev key
|
||||
|
||||
### Setting Up CI Secrets
|
||||
```bash
|
||||
# Generate production key pair (do this once, securely)
|
||||
cosign generate-key-pair
|
||||
|
||||
# Base64 encode the private key
|
||||
cat cosign.key | base64 -w0 > cosign.key.b64
|
||||
|
||||
# Add to Gitea secrets:
|
||||
# - COSIGN_PRIVATE_KEY_B64: contents of cosign.key.b64
|
||||
# - COSIGN_PASSWORD: password used during key generation
|
||||
```
|
||||
|
||||
## CI / secrets (manual usage)
|
||||
- CI should provide a base64-encoded private key via secret `COSIGN_PRIVATE_KEY_B64` and optional password in `COSIGN_PASSWORD`.
|
||||
- Example bootstrap in jobs:
|
||||
```bash
|
||||
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > /tmp/cosign.key
|
||||
chmod 600 /tmp/cosign.key
|
||||
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" cosign version
|
||||
```
|
||||
- For local dev, copy your own key to `tools/cosign/cosign.key` or export `COSIGN_PRIVATE_KEY_B64` before running signing scripts. Never commit real keys; only `cosign.key.example` lives in git.
|
||||
|
||||
## Development signing key
|
||||
|
||||
A development key pair is provided for local testing and smoke tests:
|
||||
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `tools/cosign/cosign.dev.key` | Private key (password-protected) |
|
||||
| `tools/cosign/cosign.dev.pub` | Public key for verification |
|
||||
|
||||
### Usage
|
||||
```bash
|
||||
# Sign signals artifacts with dev key
|
||||
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
|
||||
OUT_DIR=docs/modules/signals/dev-test \
|
||||
tools/cosign/sign-signals.sh
|
||||
|
||||
# Verify a signature
|
||||
cosign verify-blob \
|
||||
--key tools/cosign/cosign.dev.pub \
|
||||
--bundle docs/modules/signals/dev-test/confidence_decay_config.sigstore.json \
|
||||
docs/modules/signals/decay/confidence_decay_config.yaml
|
||||
```
|
||||
|
||||
### Security Notes
|
||||
- Password: `stellaops-dev` (do not reuse elsewhere)
|
||||
- **NOT** for production or Evidence Locker ingestion
|
||||
- Real signing requires the Signals Guild key via `COSIGN_PRIVATE_KEY_B64` (CI) or `tools/cosign/cosign.key` (local drop-in)
|
||||
- `sign-signals.sh` requires `COSIGN_ALLOW_DEV_KEY=1` to use the dev key; otherwise it refuses
|
||||
- The signing helper disables tlog upload (`--tlog-upload=false`) and auto-accepts prompts (`--yes`) for offline runs
|
||||
|
||||
## Signing Scripts
|
||||
|
||||
### sign-signals.sh
|
||||
Signs decay config, unknowns manifest, and heuristics catalog with DSSE envelopes.
|
||||
|
||||
```bash
|
||||
# Production (CI secret or cosign.key drop-in)
|
||||
OUT_DIR=evidence-locker/signals/2025-12-01 tools/cosign/sign-signals.sh
|
||||
|
||||
# Development (dev key)
|
||||
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
|
||||
OUT_DIR=docs/modules/signals/dev-test \
|
||||
tools/cosign/sign-signals.sh
|
||||
```
|
||||
|
||||
### Key Resolution Order
|
||||
1. `COSIGN_KEY_FILE` environment variable
|
||||
2. `COSIGN_PRIVATE_KEY_B64` (decoded to temp file)
|
||||
3. `tools/cosign/cosign.key` (production drop-in)
|
||||
4. `tools/cosign/cosign.dev.key` (only if `COSIGN_ALLOW_DEV_KEY=1`)
|
||||
|
||||
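A minimal shell sketch of the resolution order above (illustrative only; `sign-signals.sh` is the authoritative implementation, and the temp-file handling here is simplified):

```bash
resolve_cosign_key() {
  # 1. Explicit key file wins.
  if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
    echo "${COSIGN_KEY_FILE}"
  # 2. Base64 CI secret is decoded to a temp file.
  elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
    local tmp
    tmp="$(mktemp)"
    echo "${COSIGN_PRIVATE_KEY_B64}" | base64 -d > "${tmp}"
    chmod 600 "${tmp}"
    echo "${tmp}"
  # 3. Production drop-in.
  elif [[ -f tools/cosign/cosign.key ]]; then
    echo "tools/cosign/cosign.key"
  # 4. Dev key only when explicitly allowed.
  elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f tools/cosign/cosign.dev.key ]]; then
    echo "tools/cosign/cosign.dev.key"
  else
    echo "no signing key available" >&2
    return 1
  fi
}
```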
### sign-authority-gaps.sh
|
||||
Signs Authority gap artefacts (AU1–AU10, RR1–RR10) under `docs/modules/authority/gaps/artifacts/`.
|
||||
|
||||
```bash
|
||||
# Production (Authority key via CI secret or cosign.key drop-in)
|
||||
OUT_DIR=docs/modules/authority/gaps/dsse/2025-12-04 tools/cosign/sign-authority-gaps.sh
|
||||
|
||||
# Development (dev key, smoke only)
|
||||
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
|
||||
OUT_DIR=docs/modules/authority/gaps/dev-smoke/2025-12-04 \
|
||||
tools/cosign/sign-authority-gaps.sh
|
||||
```
|
||||
|
||||
- Outputs bundles or DSSE signatures plus `SHA256SUMS` in `OUT_DIR`.
|
||||
- tlog upload disabled (`--tlog-upload=false`) and prompts auto-accepted (`--yes`) for offline use.
|
||||
1
devops/tools/cosign/cosign
Symbolic link
@@ -0,0 +1 @@
v2.6.0/cosign-linux-amd64
11
devops/tools/cosign/cosign.dev.key
Normal file
@@ -0,0 +1,11 @@
|
||||
-----BEGIN ENCRYPTED SIGSTORE PRIVATE KEY-----
|
||||
eyJrZGYiOnsibmFtZSI6InNjcnlwdCIsInBhcmFtcyI6eyJOIjo2NTUzNiwiciI6
|
||||
OCwicCI6MX0sInNhbHQiOiJ5dlhpaXliR2lTR0NPS2x0Q2M1dlFhTy91S3pBVzNs
|
||||
Skl3QTRaU2dEMTAwPSJ9LCJjaXBoZXIiOnsibmFtZSI6Im5hY2wvc2VjcmV0Ym94
|
||||
Iiwibm9uY2UiOiIyNHA0T2xJZnJxdnhPVnM3dlY2MXNwVGpkNk80cVBEVCJ9LCJj
|
||||
aXBoZXJ0ZXh0IjoiTHRWSGRqVi94MXJrYXhscGxJbVB5dkVtc2NBYTB5dW5oakZ5
|
||||
UUFiZ1RSNVdZL3lCS0tYMWdFb09hclZDWksrQU0yY0tIM2tJQWlJNWlMd1AvV3c5
|
||||
Q3k2SVY1ek4za014cExpcjJ1QVZNV3c3Y3BiYUhnNjV4TzNOYkEwLzJOSi84R0dN
|
||||
NWt1QXhJRWsraER3ZWJ4Tld4WkRtNEZ4NTJVcVJxa2NPT09vNk9xWXB4OWFMaVZw
|
||||
RjgzRElGZFpRK2R4K05RUnUxUmNrKzBtOHc9PSJ9
|
||||
-----END ENCRYPTED SIGSTORE PRIVATE KEY-----
|
||||
4
devops/tools/cosign/cosign.dev.pub
Normal file
@@ -0,0 +1,4 @@
|
||||
-----BEGIN PUBLIC KEY-----
|
||||
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfoI+9RFCTcfjeMqpCQ3FAyvKwBQU
|
||||
YAIM2cfDR8W98OxnXV+gfV5Dhfoi8qofAnG/vC7DbBlX2t/gT7GKUZAChA==
|
||||
-----END PUBLIC KEY-----
|
||||
8
devops/tools/cosign/cosign.key.example
Normal file
@@ -0,0 +1,8 @@
|
||||
# Placeholder development cosign key
|
||||
#
|
||||
# Do not use in production. Generate your own:
|
||||
# cosign generate-key-pair
|
||||
#
|
||||
# Store the private key securely (e.g., CI secret COSIGN_PRIVATE_KEY_B64).
|
||||
#
|
||||
# This file exists only as a path stub for tooling; it is not a real key.
|
||||
BIN
devops/tools/cosign/v2.6.0/cosign-linux-amd64
Normal file
Binary file not shown.
40
devops/tools/cosign/v2.6.0/cosign_checksums.txt
Normal file
@@ -0,0 +1,40 @@
|
||||
e8c634db1252725eabfd517f02e6ebf0d07bfba5b4779d7b45ef373ceff07b38 cosign-2.6.0-1.aarch64.rpm
|
||||
9de55601c34fe7a8eaecb7a2fab93da032dd91d423a04ae6ac17e3f5ed99ec72 cosign-2.6.0-1.armv7hl.rpm
|
||||
f7281a822306c35f2bd66c055ba6f77a7298de3375a401b12664035b8b323fdf cosign-2.6.0-1.ppc64le.rpm
|
||||
814b890a07b56bcc6a42dfdf9004fadfe45c112e9b11a0c2f4ebf45568e72b4c cosign-2.6.0-1.riscv64.rpm
|
||||
19241a09cc065f062d63a9c9ce45ed7c7ff839b93672be4688334b925809d266 cosign-2.6.0-1.s390x.rpm
|
||||
52709467f072043f24553c6dd1e0f287eeeedb23340dd90a4438b8506df0a0bc cosign-2.6.0-1.x86_64.rpm
|
||||
83b0fb42bc265e62aef7de49f4979b7957c9b7320d362a9f20046b2f823330f3 cosign-darwin-amd64
|
||||
3bcbcfc41d89e162e47ba08f70ffeffaac567f663afb3545c0265a5041ce652d cosign-darwin-amd64_2.6.0_darwin_amd64.sbom.json
|
||||
dea5b83b8b375b99ac803c7bdb1f798963dbeb47789ceb72153202e7f20e8d07 cosign-darwin-arm64
|
||||
c09a84869eb31fcf334e54d0a9f81bf466ba7444dc975a8fe46b94d742288980 cosign-darwin-arm64_2.6.0_darwin_arm64.sbom.json
|
||||
ea5c65f99425d6cfbb5c4b5de5dac035f14d09131c1a0ea7c7fc32eab39364f9 cosign-linux-amd64
|
||||
b4ccc276a5cc326f87d81fd1ae12f12a8dba64214ec368a39401522cccae7f9a cosign-linux-amd64_2.6.0_linux_amd64.sbom.json
|
||||
641e05c21ce423cd263a49b1f9ffca58e2df022cb12020dcea63f8317c456950 cosign-linux-arm
|
||||
e09684650882fd721ed22b716ffc399ee11426cd4d1c9b4fec539cba8bf46b86 cosign-linux-arm64
|
||||
d05d37f6965c3f3c77260171289281dbf88d1f2b07e865bf9d4fd94d9f2fe5c4 cosign-linux-arm64_2.6.0_linux_arm64.sbom.json
|
||||
1b8b96535a7c30dbecead51ac3f51f559b31d8ab1dd4842562f857ebb1941fa5 cosign-linux-arm_2.6.0_linux_arm.sbom.json
|
||||
6fa93dbd97664ccce6c3e5221e22e14547b0d202ba829e2b34a3479266b33751 cosign-linux-pivkey-pkcs11key-amd64
|
||||
17b9803701f5908476d5904492b7a4d1568b86094c3fbb5a06afaa62a6910e8c cosign-linux-pivkey-pkcs11key-amd64_2.6.0_linux_amd64.sbom.json
|
||||
fbb78394e6fc19a2f34fea4ba03ea796aca84b666b6cdf65f46775f295fc9103 cosign-linux-pivkey-pkcs11key-arm64
|
||||
35ac308bd9c59844e056f6251ab76184bfc321cb1b3ac337fdb94a9a289d4d44 cosign-linux-pivkey-pkcs11key-arm64_2.6.0_linux_arm64.sbom.json
|
||||
bd9cc643ec8a517ca66b22221b830dc9d6064bd4f3b76579e4e28b6af5cfba5f cosign-linux-ppc64le
|
||||
ef04b0e087b95ce1ba7a902ecc962e50bfc974da0bd6b5db59c50880215a3f06 cosign-linux-ppc64le_2.6.0_linux_ppc64le.sbom.json
|
||||
17c8ff6a5dc48d3802b511c3eb7495da6142397ace28af9a1baa58fb34fad75c cosign-linux-riscv64
|
||||
2007628a662808f221dc1983d9fba2676df32bb98717f89360cd191c929492ba cosign-linux-riscv64_2.6.0_linux_riscv64.sbom.json
|
||||
7f7f042e7131950c658ff87079ac9080e7d64392915f06811f06a96238c242c1 cosign-linux-s390x
|
||||
e22a35083b21552c80bafb747c022aa2aad302c861a392199bc2a8fad22dd6b5 cosign-linux-s390x_2.6.0_linux_s390x.sbom.json
|
||||
7beb4dd1e19a72c328bbf7c0d7342d744edbf5cbb082f227b2b76e04a21c16ef cosign-windows-amd64.exe
|
||||
8110eab8c5842caf93cf05dd26f260b6836d93b0263e49e06c1bd22dd5abb82c cosign-windows-amd64.exe_2.6.0_windows_amd64.sbom.json
|
||||
7713d587f8668ce8f2a48556ee17f47c281cfb90102adfdb7182de62bc016cab cosign_2.6.0_aarch64.apk
|
||||
c51b6437559624ef88b29a1ddd88d0782549b585dbbae0a5cb2fcc02bec72687 cosign_2.6.0_amd64.deb
|
||||
438baaa35101e9982081c6450a44ea19e04cd4d2aba283ed52242e451736990b cosign_2.6.0_arm64.deb
|
||||
8dc33858a68e18bf0cc2cb18c2ba0a7d829aa59ad3125366b24477e7d6188024 cosign_2.6.0_armhf.deb
|
||||
88397077deee943690033276eef5206f7c60a30ea5f6ced66a51601ce79d0d0e cosign_2.6.0_armv7.apk
|
||||
ca45b82cde86634705187f2361363e67c70c23212283594ff942d583a543f9dd cosign_2.6.0_ppc64el.deb
|
||||
497f1a6d3899493153a4426286e673422e357224f3f931fdc028455db2fb5716 cosign_2.6.0_ppc64le.apk
|
||||
1e37d9c3d278323095899897236452858c0bc49b52a48c3bcf8ce7a236bf2ee1 cosign_2.6.0_riscv64.apk
|
||||
f2f65cf3d115fa5b25c61f6692449df2f4da58002a99e3efacc52a848fd3bca8 cosign_2.6.0_riscv64.deb
|
||||
af0a62231880fd3495bbd1f5d4c64384034464b80930b7ffcd819d7152e75759 cosign_2.6.0_s390x.apk
|
||||
e282d9337e4ba163a48ff1175855a6f6d6fbb562bc6c576c93944a6126984203 cosign_2.6.0_s390x.deb
|
||||
382a842b2242656ecd442ae461c4dc454a366ed50d41a2dafcce8b689bfd03e4 cosign_2.6.0_x86_64.apk
|
||||
220
devops/tools/crypto/download-cryptopro-playwright.cjs
Normal file
@@ -0,0 +1,220 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* CryptoPro CSP downloader (Playwright-driven).
|
||||
*
|
||||
* Navigates cryptopro.ru downloads page, optionally fills login form, and selects
|
||||
* Linux packages (.rpm/.deb/.tar.gz/.tgz/.bin) under the CSP Linux section.
|
||||
*
|
||||
* Environment:
|
||||
* - CRYPTOPRO_URL (default: https://cryptopro.ru/products/csp/downloads#latest_csp50r3_linux)
|
||||
* - CRYPTOPRO_EMAIL / CRYPTOPRO_PASSWORD (default demo creds: contact@stella-ops.org / Hoko33JD3nj3aJD.)
|
||||
* - CRYPTOPRO_DRY_RUN (default: 1) -> list candidates, do not download
|
||||
* - CRYPTOPRO_OUTPUT_DIR (default: /opt/cryptopro/downloads)
|
||||
* - CRYPTOPRO_OUTPUT_FILE (optional: force a specific output filename/path)
|
||||
* - CRYPTOPRO_UNPACK (default: 0) -> attempt to unpack tar.gz/tgz/rpm/deb
|
||||
*/
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { spawnSync } = require('child_process');
|
||||
const { chromium } = require('playwright-chromium');
|
||||
|
||||
const url = process.env.CRYPTOPRO_URL || 'https://cryptopro.ru/products/csp/downloads#latest_csp50r3_linux';
|
||||
const email = process.env.CRYPTOPRO_EMAIL || 'contact@stella-ops.org';
|
||||
const password = process.env.CRYPTOPRO_PASSWORD || 'Hoko33JD3nj3aJD.';
|
||||
const dryRun = (process.env.CRYPTOPRO_DRY_RUN || '1') !== '0';
|
||||
const outputDir = process.env.CRYPTOPRO_OUTPUT_DIR || '/opt/cryptopro/downloads';
|
||||
const outputFile = process.env.CRYPTOPRO_OUTPUT_FILE;
|
||||
const unpack = (process.env.CRYPTOPRO_UNPACK || '0') === '1';
|
||||
const navTimeout = parseInt(process.env.CRYPTOPRO_NAV_TIMEOUT || '60000', 10);
|
||||
|
||||
const linuxPattern = /\.(rpm|deb|tar\.gz|tgz|bin)(\?|$)/i;
|
||||
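// Illustrative matches (hypothetical URLs): ".../csp-5.0-linux-amd64.deb",
// ".../linux-amd64.tar.gz?token=abc"; links without these extensions are ignored.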
const debugLinks = (process.env.CRYPTOPRO_DEBUG || '0') === '1';
|
||||
|
||||
function log(msg) {
|
||||
process.stdout.write(`${msg}\n`);
|
||||
}
|
||||
|
||||
function warn(msg) {
|
||||
process.stderr.write(`[WARN] ${msg}\n`);
|
||||
}
|
||||
|
||||
async function maybeLogin(page) {
|
||||
const emailSelector = 'input[type="email"], input[name*="email" i], input[name*="login" i], input[name="name"]';
|
||||
const passwordSelector = 'input[type="password"], input[name*="password" i]';
|
||||
const submitSelector = 'button[type="submit"], input[type="submit"]';
|
||||
|
||||
const emailInput = await page.$(emailSelector);
|
||||
const passwordInput = await page.$(passwordSelector);
|
||||
if (emailInput && passwordInput) {
|
||||
log('[login] Form detected; submitting credentials');
|
||||
await emailInput.fill(email);
|
||||
await passwordInput.fill(password);
|
||||
const submit = await page.$(submitSelector);
|
||||
if (submit) {
|
||||
await Promise.all([
|
||||
page.waitForNavigation({ waitUntil: 'networkidle', timeout: 15000 }).catch(() => {}),
|
||||
submit.click()
|
||||
]);
|
||||
} else {
|
||||
await passwordInput.press('Enter');
|
||||
await page.waitForTimeout(2000);
|
||||
}
|
||||
} else {
|
||||
log('[login] No login form detected; continuing anonymously');
|
||||
}
|
||||
}
|
||||
|
||||
async function findLinuxLinks(page) {
|
||||
const targets = [page, ...page.frames()];
|
||||
const hrefs = [];
|
||||
|
||||
// Collect href/data-href/data-url across main page + frames
|
||||
for (const target of targets) {
|
||||
try {
|
||||
const collected = await target.$$eval('a[href], [data-href], [data-url]', (els) =>
|
||||
els
|
||||
.map((el) => el.getAttribute('href') || el.getAttribute('data-href') || el.getAttribute('data-url'))
|
||||
.filter((href) => typeof href === 'string')
|
||||
);
|
||||
hrefs.push(...collected);
|
||||
} catch (err) {
|
||||
warn(`[scan] Failed to collect links from frame: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
const unique = Array.from(new Set(hrefs));
|
||||
return unique.filter((href) => linuxPattern.test(href));
|
||||
}
|
||||
|
||||
function unpackIfSupported(filePath) {
|
||||
if (!unpack) {
|
||||
return;
|
||||
}
|
||||
const cwd = path.dirname(filePath);
|
||||
if (filePath.endsWith('.tar.gz') || filePath.endsWith('.tgz')) {
|
||||
const res = spawnSync('tar', ['-xzf', filePath, '-C', cwd], { stdio: 'inherit' });
|
||||
if (res.status === 0) {
|
||||
log(`[unpack] Extracted ${filePath}`);
|
||||
} else {
|
||||
warn(`[unpack] Failed to extract ${filePath}`);
|
||||
}
|
||||
} else if (filePath.endsWith('.rpm')) {
|
||||
const res = spawnSync('bash', ['-lc', `rpm2cpio "${filePath}" | cpio -idmv`], { stdio: 'inherit', cwd });
|
||||
if (res.status === 0) {
|
||||
log(`[unpack] Extracted RPM ${filePath}`);
|
||||
} else {
|
||||
warn(`[unpack] Failed to extract RPM ${filePath}`);
|
||||
}
|
||||
} else if (filePath.endsWith('.deb')) {
|
||||
const res = spawnSync('dpkg-deb', ['-x', filePath, cwd], { stdio: 'inherit' });
|
||||
if (res.status === 0) {
|
||||
log(`[unpack] Extracted DEB ${filePath}`);
|
||||
} else {
|
||||
warn(`[unpack] Failed to extract DEB ${filePath}`);
|
||||
}
|
||||
} else if (filePath.endsWith('.bin')) {
|
||||
const res = spawnSync('chmod', ['+x', filePath], { stdio: 'inherit' });
|
||||
if (res.status === 0) {
|
||||
log(`[unpack] Marked ${filePath} as executable (self-extract expected)`);
|
||||
} else {
|
||||
warn(`[unpack] Could not mark ${filePath} executable`);
|
||||
}
|
||||
} else {
|
||||
warn(`[unpack] Skipping unsupported archive type for ${filePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
if (email === 'contact@stella-ops.org' && password === 'Hoko33JD3nj3aJD.') {
|
||||
warn('Using default demo credentials; set CRYPTOPRO_EMAIL/CRYPTOPRO_PASSWORD to real customer creds.');
|
||||
}
|
||||
|
||||
const browser = await chromium.launch({ headless: true });
|
||||
const context = await browser.newContext({
|
||||
acceptDownloads: true,
|
||||
httpCredentials: { username: email, password }
|
||||
});
|
||||
const page = await context.newPage();
|
||||
log(`[nav] Opening ${url}`);
|
||||
try {
|
||||
await page.goto(url, { waitUntil: 'networkidle', timeout: navTimeout });
|
||||
} catch (err) {
|
||||
warn(`[nav] Navigation at networkidle failed (${err.message}); retrying with waitUntil=load`);
|
||||
await page.goto(url, { waitUntil: 'load', timeout: navTimeout });
|
||||
}
|
||||
log(`[nav] Landed on ${page.url()}`);
|
||||
await maybeLogin(page);
|
||||
await page.waitForTimeout(2000);
|
||||
|
||||
const loginGate =
|
||||
page.url().includes('/user') ||
|
||||
(await page.$('form#user-login, form[id*="user-login"], .captcha, #captcha-container'));
|
||||
if (loginGate) {
|
||||
warn('[auth] Login/captcha gate detected on downloads page; automated fetch blocked. Provide session/cookies or run headful to solve manually.');
|
||||
await browser.close();
|
||||
return 2;
|
||||
}
|
||||
|
||||
let links = await findLinuxLinks(page);
|
||||
if (links.length === 0) {
|
||||
await page.waitForTimeout(1500);
|
||||
await page.evaluate(() => window.scrollTo(0, document.body.scrollHeight));
|
||||
await page.waitForTimeout(2000);
|
||||
links = await findLinuxLinks(page);
|
||||
}
|
||||
if (links.length === 0) {
|
||||
if (debugLinks) {
|
||||
const targetDir = outputFile ? path.dirname(outputFile) : outputDir;
|
||||
await fs.promises.mkdir(targetDir, { recursive: true });
|
||||
const debugHtml = path.join(targetDir, 'cryptopro-download-page.html');
|
||||
await fs.promises.writeFile(debugHtml, await page.content(), 'utf8');
|
||||
log(`[debug] Saved page HTML to ${debugHtml}`);
|
||||
const allLinks = await page.$$eval('a[href], [data-href], [data-url]', (els) =>
|
||||
els
|
||||
.map((el) => el.getAttribute('href') || el.getAttribute('data-href') || el.getAttribute('data-url'))
|
||||
.filter((href) => typeof href === 'string')
|
||||
);
|
||||
log(`[debug] Total link-like attributes: ${allLinks.length}`);
|
||||
allLinks.slice(0, 20).forEach((href, idx) => log(` [all ${idx + 1}] ${href}`));
|
||||
}
|
||||
warn('No Linux download links found on page.');
|
||||
await browser.close();
|
||||
return 1;
|
||||
}
|
||||
|
||||
log(`[scan] Found ${links.length} Linux candidate links`);
|
||||
links.slice(0, 10).forEach((href, idx) => log(` [${idx + 1}] ${href}`));
|
||||
|
||||
if (dryRun) {
|
||||
log('[mode] Dry-run enabled; not downloading. Set CRYPTOPRO_DRY_RUN=0 to fetch.');
|
||||
await browser.close();
|
||||
return 0;
|
||||
}
|
||||
|
||||
const target = links[0];
|
||||
log(`[download] Fetching ${target}`);
|
||||
const [download] = await Promise.all([
|
||||
page.waitForEvent('download', { timeout: 30000 }),
|
||||
page.goto(target).catch(() => page.click(`a[href="${target}"]`).catch(() => {}))
|
||||
]);
|
||||
|
||||
const targetDir = outputFile ? path.dirname(outputFile) : outputDir;
|
||||
await fs.promises.mkdir(targetDir, { recursive: true });
|
||||
const suggested = download.suggestedFilename();
|
||||
const outPath = outputFile ? outputFile : path.join(outputDir, suggested);
|
||||
await download.saveAs(outPath);
|
||||
log(`[download] Saved to ${outPath}`);
|
||||
|
||||
unpackIfSupported(outPath);
|
||||
|
||||
await browser.close();
|
||||
return 0;
|
||||
}
|
||||
|
||||
main()
|
||||
.then((code) => process.exit(code))
|
||||
.catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
69
devops/tools/crypto/package-rootpack-ru.sh
Normal file
@@ -0,0 +1,69 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(git rev-parse --show-toplevel)"
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
OUTPUT_ROOT="${1:-${ROOT_DIR}/build/rootpack_ru_${TIMESTAMP}}"
ARTIFACT_DIR="${OUTPUT_ROOT}/artifacts"
DOC_DIR="${OUTPUT_ROOT}/docs"
CONFIG_DIR="${OUTPUT_ROOT}/config"
TRUST_DIR="${OUTPUT_ROOT}/trust"

mkdir -p "$ARTIFACT_DIR" "$DOC_DIR" "$CONFIG_DIR" "$TRUST_DIR"

publish_plugin() {
  local project="$1"
  local name="$2"
  local publish_dir="${ARTIFACT_DIR}/${name}"
  echo "[rootpack-ru] Publishing ${project} -> ${publish_dir}"
  dotnet publish "$project" -c Release -o "$publish_dir" --nologo >/dev/null
}

publish_plugin "src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro/StellaOps.Cryptography.Plugin.CryptoPro.csproj" "StellaOps.Cryptography.Plugin.CryptoPro"
publish_plugin "src/__Libraries/StellaOps.Cryptography.Plugin.Pkcs11Gost/StellaOps.Cryptography.Plugin.Pkcs11Gost.csproj" "StellaOps.Cryptography.Plugin.Pkcs11Gost"

cp docs/security/rootpack_ru_validation.md "$DOC_DIR/"
cp docs/security/crypto-routing-audit-2025-11-07.md "$DOC_DIR/"
cp docs/security/rootpack_ru_package.md "$DOC_DIR/"
cp etc/rootpack/ru/crypto.profile.yaml "$CONFIG_DIR/rootpack_ru.crypto.yaml"

if [ "${INCLUDE_GOST_VALIDATION:-1}" != "0" ]; then
  candidate="${OPENSSL_GOST_LOG_DIR:-}"
  if [ -z "$candidate" ]; then
    candidate="$(ls -d "${ROOT_DIR}"/logs/openssl_gost_validation_* "${ROOT_DIR}"/logs/rootpack_ru_*/openssl_gost 2>/dev/null | sort | tail -n 1 || true)"
  fi

  if [ -n "$candidate" ] && [ -d "$candidate" ]; then
    mkdir -p "${DOC_DIR}/gost-validation"
    cp -r "$candidate" "${DOC_DIR}/gost-validation/latest"
  fi
fi

shopt -s nullglob
for pem in "$ROOT_DIR"/certificates/russian_trusted_*; do
  cp "$pem" "$TRUST_DIR/"
done
shopt -u nullglob

cat <<README >"${OUTPUT_ROOT}/README.txt"
RootPack_RU bundle (${TIMESTAMP})
--------------------------------
Contents:
- artifacts/ : Sovereign crypto plug-ins published for net10.0 (CryptoPro + PKCS#11)
- config/rootpack_ru.crypto.yaml : example configuration binding registry profiles
- docs/ : validation + audit documentation
- trust/ : Russian trust anchor PEM bundle copied from certificates/

Usage:
1. Review docs/rootpack_ru_package.md for installation steps.
2. Execute scripts/crypto/run-rootpack-ru-tests.sh (or CI equivalent) and attach the logs to this bundle.
3. Record hardware validation outputs per docs/rootpack_ru_validation.md and store alongside this directory.
README

if [[ "${PACKAGE_TAR:-1}" != "0" ]]; then
  tarball="${OUTPUT_ROOT}.tar.gz"
  echo "[rootpack-ru] Creating ${tarball}"
  tar -czf "$tarball" -C "$(dirname "$OUTPUT_ROOT")" "$(basename "$OUTPUT_ROOT")"
fi

echo "[rootpack-ru] Bundle staged under $OUTPUT_ROOT"
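A minimal invocation sketch for the packaging script above (the output directory and tarball toggle shown here are illustrative):
  # Stage the bundle under a custom directory and skip the tarball step
  PACKAGE_TAR=0 bash devops/tools/crypto/package-rootpack-ru.sh build/rootpack_ru_manual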
25
devops/tools/crypto/run-cryptopro-tests.ps1
Normal file
@@ -0,0 +1,25 @@
param(
    [string]$Configuration = "Release"
)

if (-not $IsWindows) {
    Write-Host "CryptoPro tests require Windows" -ForegroundColor Yellow
    exit 0
}

if (-not (Get-Command dotnet -ErrorAction SilentlyContinue)) {
    Write-Host "dotnet SDK not found" -ForegroundColor Red
    exit 1
}

# Opt-in flag to avoid accidental runs on agents without CryptoPro CSP installed
$env:STELLAOPS_CRYPTO_PRO_ENABLED = "1"

Write-Host "Running CryptoPro-only tests..." -ForegroundColor Cyan

pushd $PSScriptRoot\..\..
try {
    dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -c $Configuration --filter CryptoProGostSignerTests
} finally {
    popd
}
96
devops/tools/crypto/run-rootpack-ru-tests.sh
Normal file
@@ -0,0 +1,96 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
ROOT_DIR="$(git rev-parse --show-toplevel)"
|
||||
DEFAULT_LOG_ROOT="${ROOT_DIR}/logs/rootpack_ru_$(date -u +%Y%m%dT%H%M%SZ)"
|
||||
LOG_ROOT="${ROOTPACK_LOG_DIR:-$DEFAULT_LOG_ROOT}"
|
||||
ALLOW_PARTIAL="${ALLOW_PARTIAL:-1}"
|
||||
mkdir -p "$LOG_ROOT"
|
||||
|
||||
PROJECTS=(
|
||||
"src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj"
|
||||
"src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj"
|
||||
"src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj"
|
||||
)
|
||||
if [ "${RUN_SCANNER:-1}" != "1" ]; then
|
||||
PROJECTS=("${PROJECTS[0]}")
|
||||
echo "[rootpack-ru] RUN_SCANNER=0 set; skipping scanner test suites"
|
||||
fi
|
||||
|
||||
run_test() {
|
||||
local project="$1"
|
||||
local extra_props=""
|
||||
|
||||
if [ "${STELLAOPS_ENABLE_CRYPTO_PRO:-""}" = "1" ]; then
|
||||
extra_props+=" /p:StellaOpsEnableCryptoPro=true"
|
||||
fi
|
||||
|
||||
if [ "${STELLAOPS_ENABLE_PKCS11:-""}" = "1" ]; then
|
||||
extra_props+=" /p:StellaOpsEnablePkcs11=true"
|
||||
fi
|
||||
local safe_name
|
||||
safe_name="$(basename "${project%.csproj}")"
|
||||
local log_file="${LOG_ROOT}/${safe_name}.log"
|
||||
local trx_name="${safe_name}.trx"
|
||||
|
||||
echo "[rootpack-ru] Running tests for ${project}" | tee "$log_file"
|
||||
dotnet test "$project" \
|
||||
--nologo \
|
||||
--verbosity minimal \
|
||||
--results-directory "$LOG_ROOT" \
|
||||
--logger "trx;LogFileName=${trx_name}" ${extra_props} | tee -a "$log_file"
|
||||
}
|
||||
|
||||
PROJECT_SUMMARY=()
|
||||
for project in "${PROJECTS[@]}"; do
|
||||
safe_name="$(basename "${project%.csproj}")"
|
||||
if run_test "$project"; then
|
||||
PROJECT_SUMMARY+=("$project|$safe_name|PASS")
|
||||
echo "[rootpack-ru] Wrote logs for ${project} -> ${LOG_ROOT}/${safe_name}.log"
|
||||
else
|
||||
PROJECT_SUMMARY+=("$project|$safe_name|FAIL")
|
||||
echo "[rootpack-ru] Test run failed for ${project}; see ${LOG_ROOT}/${safe_name}.log"
|
||||
if [ "${ALLOW_PARTIAL}" != "1" ]; then
|
||||
echo "[rootpack-ru] ALLOW_PARTIAL=0; aborting harness."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
GOST_SUMMARY="skipped (docker not available)"
|
||||
if [ "${RUN_GOST_VALIDATION:-1}" = "1" ]; then
|
||||
if command -v docker >/dev/null 2>&1; then
|
||||
echo "[rootpack-ru] Running OpenSSL GOST validation harness"
|
||||
OPENSSL_GOST_LOG_DIR="${LOG_ROOT}/openssl_gost"
|
||||
if OPENSSL_GOST_LOG_DIR="${OPENSSL_GOST_LOG_DIR}" bash "${ROOT_DIR}/scripts/crypto/validate-openssl-gost.sh"; then
|
||||
if [ -d "${OPENSSL_GOST_LOG_DIR}" ] && [ -f "${OPENSSL_GOST_LOG_DIR}/summary.txt" ]; then
|
||||
GOST_SUMMARY="$(cat "${OPENSSL_GOST_LOG_DIR}/summary.txt")"
|
||||
else
|
||||
GOST_SUMMARY="completed (see logs/openssl_gost_validation_*)"
|
||||
fi
|
||||
else
|
||||
GOST_SUMMARY="failed (see logs/openssl_gost_validation_*)"
|
||||
fi
|
||||
else
|
||||
echo "[rootpack-ru] Docker not available; skipping OpenSSL GOST validation."
|
||||
fi
|
||||
fi
|
||||
|
||||
{
|
||||
echo "RootPack_RU deterministic test harness"
|
||||
echo "Generated: $(date -u +%Y-%m-%dT%H:%M:%SZ)"
|
||||
echo "Log Directory: $LOG_ROOT"
|
||||
echo ""
|
||||
echo "Projects:"
|
||||
for entry in "${PROJECT_SUMMARY[@]}"; do
|
||||
project_path="${entry%%|*}"
|
||||
rest="${entry#*|}"
|
||||
safe_name="${rest%%|*}"
|
||||
status="${rest##*|}"
|
||||
printf ' - %s (log: %s.log, trx: %s.trx) [%s]\n' "$project_path" "$safe_name" "$safe_name" "$status"
|
||||
done
|
||||
echo ""
|
||||
echo "GOST validation: ${GOST_SUMMARY}"
|
||||
} > "$LOG_ROOT/README.tests"
|
||||
|
||||
echo "Logs and TRX files available under $LOG_ROOT"
|
||||
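A minimal usage sketch for the harness above (the toggles are the script's own environment switches; the values shown are illustrative):
  # Crypto tests only, fail fast on the first broken suite, skip the dockerised GOST validation
  RUN_SCANNER=0 ALLOW_PARTIAL=0 RUN_GOST_VALIDATION=0 bash devops/tools/crypto/run-rootpack-ru-tests.sh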
42
devops/tools/crypto/run-sim-smoke.ps1
Normal file
@@ -0,0 +1,42 @@
param(
    [string] $BaseUrl = "http://localhost:5000",
    [string] $SimProfile = "sm"
)

$ErrorActionPreference = "Stop"
$repoRoot = Resolve-Path "$PSScriptRoot/../.."

Push-Location $repoRoot
$job = $null
try {
    Write-Host "Building sim service and smoke harness..."
    dotnet build ops/crypto/sim-crypto-service/SimCryptoService.csproj -c Release | Out-Host
    dotnet build ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj -c Release | Out-Host

    Write-Host "Starting sim service at $BaseUrl ..."
    $job = Start-Job -ArgumentList $repoRoot, $BaseUrl -ScriptBlock {
        param($path, $url)
        Set-Location $path
        $env:ASPNETCORE_URLS = $url
        dotnet run --project ops/crypto/sim-crypto-service/SimCryptoService.csproj --no-build -c Release
    }

    Start-Sleep -Seconds 6

    $env:STELLAOPS_CRYPTO_SIM_URL = $BaseUrl
    $env:SIM_PROFILE = $SimProfile
    Write-Host "Running smoke harness (profile=$SimProfile, url=$BaseUrl)..."
    dotnet run --project ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj --no-build -c Release
    $exitCode = $LASTEXITCODE
    if ($exitCode -ne 0) {
        throw "Smoke harness failed with exit code $exitCode"
    }
}
finally {
    if ($job) {
        Stop-Job $job -ErrorAction SilentlyContinue | Out-Null
        Receive-Job $job -ErrorAction SilentlyContinue | Out-Null
        Remove-Job $job -ErrorAction SilentlyContinue | Out-Null
    }
    Pop-Location
}
108
devops/tools/crypto/validate-openssl-gost.sh
Executable file
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
if ! command -v docker >/dev/null 2>&1; then
|
||||
echo "[gost-validate] docker is required but not found on PATH" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ROOT_DIR="$(git rev-parse --show-toplevel)"
|
||||
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
|
||||
LOG_ROOT="${OPENSSL_GOST_LOG_DIR:-${ROOT_DIR}/logs/openssl_gost_validation_${TIMESTAMP}}"
|
||||
IMAGE="${OPENSSL_GOST_IMAGE:-rnix/openssl-gost:latest}"
|
||||
MOUNT_PATH="${LOG_ROOT}"
|
||||
|
||||
UNAME_OUT="$(uname -s || true)"
|
||||
case "${UNAME_OUT}" in
|
||||
MINGW*|MSYS*|CYGWIN*)
|
||||
if command -v wslpath >/dev/null 2>&1; then
|
||||
# Docker Desktop on Windows prefers Windows-style mount paths.
|
||||
MOUNT_PATH="$(wslpath -m "${LOG_ROOT}")"
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
MOUNT_PATH="${LOG_ROOT}"
|
||||
;;
|
||||
esac
|
||||
|
||||
mkdir -p "${LOG_ROOT}"
|
||||
|
||||
cat >"${LOG_ROOT}/message.txt" <<'EOF'
|
||||
StellaOps OpenSSL GOST validation message (md_gost12_256)
|
||||
EOF
|
||||
|
||||
echo "[gost-validate] Using image ${IMAGE}"
|
||||
docker pull "${IMAGE}" >/dev/null
|
||||
|
||||
CONTAINER_SCRIPT_PATH="${LOG_ROOT}/container-script.sh"
|
||||
|
||||
cat > "${CONTAINER_SCRIPT_PATH}" <<'CONTAINER_SCRIPT'
|
||||
set -eu
|
||||
|
||||
MESSAGE="/out/message.txt"
|
||||
|
||||
openssl version -a > /out/openssl-version.txt
|
||||
openssl engine -c > /out/engine-list.txt
|
||||
|
||||
openssl genpkey -engine gost -algorithm gost2012_256 -pkeyopt paramset:A -out /tmp/gost.key.pem >/dev/null
|
||||
openssl pkey -engine gost -in /tmp/gost.key.pem -pubout -out /out/gost.pub.pem >/dev/null
|
||||
|
||||
DIGEST_LINE="$(openssl dgst -engine gost -md_gost12_256 "${MESSAGE}")"
|
||||
echo "${DIGEST_LINE}" > /out/digest.txt
|
||||
DIGEST="$(printf "%s" "${DIGEST_LINE}" | awk -F'= ' '{print $2}')"
|
||||
|
||||
openssl dgst -engine gost -md_gost12_256 -sign /tmp/gost.key.pem -out /tmp/signature1.bin "${MESSAGE}"
|
||||
openssl dgst -engine gost -md_gost12_256 -sign /tmp/gost.key.pem -out /tmp/signature2.bin "${MESSAGE}"
|
||||
|
||||
openssl dgst -engine gost -md_gost12_256 -verify /out/gost.pub.pem -signature /tmp/signature1.bin "${MESSAGE}" > /out/verify1.txt
|
||||
openssl dgst -engine gost -md_gost12_256 -verify /out/gost.pub.pem -signature /tmp/signature2.bin "${MESSAGE}" > /out/verify2.txt
|
||||
|
||||
SIG1_SHA="$(sha256sum /tmp/signature1.bin | awk '{print $1}')"
|
||||
SIG2_SHA="$(sha256sum /tmp/signature2.bin | awk '{print $1}')"
|
||||
MSG_SHA="$(sha256sum "${MESSAGE}" | awk '{print $1}')"
|
||||
|
||||
cp /tmp/signature1.bin /out/signature1.bin
|
||||
cp /tmp/signature2.bin /out/signature2.bin
|
||||
|
||||
DETERMINISTIC_BOOL=false
|
||||
DETERMINISTIC_LABEL="no"
|
||||
if [ "${SIG1_SHA}" = "${SIG2_SHA}" ]; then
|
||||
DETERMINISTIC_BOOL=true
|
||||
DETERMINISTIC_LABEL="yes"
|
||||
fi
|
||||
|
||||
cat > /out/summary.txt <<SUMMARY
|
||||
OpenSSL GOST validation (Linux engine)
|
||||
Image: ${VALIDATION_IMAGE:-unknown}
|
||||
Digest algorithm: md_gost12_256
|
||||
Message SHA256: ${MSG_SHA}
|
||||
Digest: ${DIGEST}
|
||||
Signature1 SHA256: ${SIG1_SHA}
|
||||
Signature2 SHA256: ${SIG2_SHA}
|
||||
Signatures deterministic: ${DETERMINISTIC_LABEL}
|
||||
SUMMARY
|
||||
|
||||
cat > /out/summary.json <<SUMMARYJSON
|
||||
{
|
||||
"image": "${VALIDATION_IMAGE:-unknown}",
|
||||
"digest_algorithm": "md_gost12_256",
|
||||
"message_sha256": "${MSG_SHA}",
|
||||
"digest": "${DIGEST}",
|
||||
"signature1_sha256": "${SIG1_SHA}",
|
||||
"signature2_sha256": "${SIG2_SHA}",
|
||||
"signatures_deterministic": ${DETERMINISTIC_BOOL}
|
||||
}
|
||||
SUMMARYJSON
|
||||
|
||||
CONTAINER_SCRIPT
|
||||
|
||||
docker run --rm \
|
||||
-e VALIDATION_IMAGE="${IMAGE}" \
|
||||
-v "${MOUNT_PATH}:/out" \
|
||||
"${IMAGE}" /bin/sh "/out/$(basename "${CONTAINER_SCRIPT_PATH}")"
|
||||
|
||||
rm -f "${CONTAINER_SCRIPT_PATH}"
|
||||
|
||||
echo "[gost-validate] Artifacts written to ${LOG_ROOT}"
|
||||
echo "[gost-validate] Summary:"
|
||||
cat "${LOG_ROOT}/summary.txt"
|
||||
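A minimal usage sketch for the validation script above (the image tag and log directory are illustrative overrides of the script's defaults):
  OPENSSL_GOST_IMAGE=rnix/openssl-gost:latest \
  OPENSSL_GOST_LOG_DIR=logs/openssl_gost_manual \
  bash devops/tools/crypto/validate-openssl-gost.sh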
160
devops/tools/determinism/compare-platform-hashes.py
Normal file
@@ -0,0 +1,160 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cross-platform hash comparison for determinism verification.
|
||||
Sprint: SPRINT_20251226_007_BE_determinism_gaps
|
||||
Task: DET-GAP-13 - Cross-platform hash comparison report generation
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def load_hashes(path: str) -> dict[str, str]:
|
||||
"""Load hash file from path."""
|
||||
with open(path) as f:
|
||||
data = json.load(f)
|
||||
return data.get("hashes", data)
|
||||
|
||||
|
||||
def compare_hashes(
|
||||
linux: dict[str, str],
|
||||
windows: dict[str, str],
|
||||
macos: dict[str, str]
|
||||
) -> tuple[list[dict], list[str]]:
|
||||
"""
|
||||
Compare hashes across platforms.
|
||||
Returns (divergences, matched_keys).
|
||||
"""
|
||||
all_keys = set(linux.keys()) | set(windows.keys()) | set(macos.keys())
|
||||
divergences = []
|
||||
matched = []
|
||||
|
||||
for key in sorted(all_keys):
|
||||
linux_hash = linux.get(key, "MISSING")
|
||||
windows_hash = windows.get(key, "MISSING")
|
||||
macos_hash = macos.get(key, "MISSING")
|
||||
|
||||
if linux_hash == windows_hash == macos_hash:
|
||||
matched.append(key)
|
||||
else:
|
||||
divergences.append({
|
||||
"key": key,
|
||||
"linux": linux_hash,
|
||||
"windows": windows_hash,
|
||||
"macos": macos_hash
|
||||
})
|
||||
|
||||
return divergences, matched
|
||||
|
||||
|
||||
def generate_markdown_report(
|
||||
divergences: list[dict],
|
||||
matched: list[str],
|
||||
linux_path: str,
|
||||
windows_path: str,
|
||||
macos_path: str
|
||||
) -> str:
|
||||
"""Generate Markdown report."""
|
||||
lines = [
|
||||
f"**Generated:** {datetime.now(timezone.utc).isoformat()}",
|
||||
"",
|
||||
"### Summary",
|
||||
"",
|
||||
f"- ✅ **Matched:** {len(matched)} hashes",
|
||||
f"- {'❌' if divergences else '✅'} **Divergences:** {len(divergences)} hashes",
|
||||
"",
|
||||
]
|
||||
|
||||
if divergences:
|
||||
lines.extend([
|
||||
"### Divergences",
|
||||
"",
|
||||
"| Key | Linux | Windows | macOS |",
|
||||
"|-----|-------|---------|-------|",
|
||||
])
|
||||
for d in divergences:
|
||||
linux_short = d["linux"][:16] + "..." if len(d["linux"]) > 16 else d["linux"]
|
||||
windows_short = d["windows"][:16] + "..." if len(d["windows"]) > 16 else d["windows"]
|
||||
macos_short = d["macos"][:16] + "..." if len(d["macos"]) > 16 else d["macos"]
|
||||
lines.append(f"| `{d['key']}` | `{linux_short}` | `{windows_short}` | `{macos_short}` |")
|
||||
lines.append("")
|
||||
|
||||
lines.extend([
|
||||
"### Matched Hashes",
|
||||
"",
|
||||
f"<details><summary>Show {len(matched)} matched hashes</summary>",
|
||||
"",
|
||||
])
|
||||
for key in matched[:50]: # Limit display
|
||||
lines.append(f"- `{key}`")
|
||||
if len(matched) > 50:
|
||||
lines.append(f"- ... and {len(matched) - 50} more")
|
||||
lines.extend(["", "</details>", ""])
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Compare determinism hashes across platforms")
|
||||
parser.add_argument("--linux", required=True, help="Path to Linux hashes JSON")
|
||||
parser.add_argument("--windows", required=True, help="Path to Windows hashes JSON")
|
||||
parser.add_argument("--macos", required=True, help="Path to macOS hashes JSON")
|
||||
parser.add_argument("--output", required=True, help="Output JSON report path")
|
||||
parser.add_argument("--markdown", required=True, help="Output Markdown report path")
|
||||
args = parser.parse_args()
|
||||
|
||||
# Load hashes
|
||||
linux_hashes = load_hashes(args.linux)
|
||||
windows_hashes = load_hashes(args.windows)
|
||||
macos_hashes = load_hashes(args.macos)
|
||||
|
||||
# Compare
|
||||
divergences, matched = compare_hashes(linux_hashes, windows_hashes, macos_hashes)
|
||||
|
||||
# Generate reports
|
||||
report = {
|
||||
"timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
"sources": {
|
||||
"linux": args.linux,
|
||||
"windows": args.windows,
|
||||
"macos": args.macos
|
||||
},
|
||||
"summary": {
|
||||
"matched": len(matched),
|
||||
"divergences": len(divergences),
|
||||
"total": len(matched) + len(divergences)
|
||||
},
|
||||
"divergences": divergences,
|
||||
"matched": matched
|
||||
}
|
||||
|
||||
# Write JSON report
|
||||
Path(args.output).parent.mkdir(parents=True, exist_ok=True)
|
||||
with open(args.output, "w") as f:
|
||||
json.dump(report, f, indent=2)
|
||||
|
||||
# Write Markdown report
|
||||
markdown = generate_markdown_report(
|
||||
divergences, matched,
|
||||
args.linux, args.windows, args.macos
|
||||
)
|
||||
with open(args.markdown, "w") as f:
|
||||
f.write(markdown)
|
||||
|
||||
# Print summary
|
||||
print(f"Comparison complete:")
|
||||
print(f" Matched: {len(matched)}")
|
||||
print(f" Divergences: {len(divergences)}")
|
||||
|
||||
# Exit with error if divergences found
|
||||
if divergences:
|
||||
print("\nERROR: Hash divergences detected!")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
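A minimal invocation sketch for the comparison script above (the hash-file paths are placeholders; the script exits non-zero when divergences are found):
  python devops/tools/determinism/compare-platform-hashes.py \
    --linux out/hashes-linux.json --windows out/hashes-windows.json --macos out/hashes-macos.json \
    --output out/determinism-report.json --markdown out/determinism-report.md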
28
devops/tools/devportal-tools/hash-snippets.sh
Normal file
@@ -0,0 +1,28 @@
#!/usr/bin/env bash
set -euo pipefail
# Deterministic hashing helper for DevPortal SDK snippet packs and offline bundle artefacts.
# Usage:
#   SNIPPET_DIR=src/DevPortal/StellaOps.DevPortal.Site/snippets \
#   OUT_SHA=src/DevPortal/StellaOps.DevPortal.Site/SHA256SUMS.devportal-stubs \
#   devops/tools/devportal-tools/hash-snippets.sh

ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SNIPPET_DIR="${SNIPPET_DIR:-$ROOT/src/DevPortal/StellaOps.DevPortal.Site/snippets}"
OUT_SHA="${OUT_SHA:-$ROOT/src/DevPortal/StellaOps.DevPortal.Site/SHA256SUMS.devportal-stubs}"

if [[ ! -d "$SNIPPET_DIR" ]]; then
  echo "Snippet dir not found: $SNIPPET_DIR" >&2
  exit 1
fi

mkdir -p "$(dirname "$OUT_SHA")"
: > "$OUT_SHA"

cd "$SNIPPET_DIR"
find . -type f -print0 | sort -z | while IFS= read -r -d '' f; do
  sha=$(sha256sum "$f" | cut -d' ' -f1)
  printf "%s %s\n" "$sha" "${SNIPPET_DIR#$ROOT/}/$f" >> "$OUT_SHA"
  echo "hashed $f"
done

echo "Hashes written to $OUT_SHA"
11
devops/tools/export-policy-schemas.sh
Normal file
@@ -0,0 +1,11 @@
#!/usr/bin/env bash
set -euo pipefail

REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. && pwd)"
OUTPUT_DIR="${1:-$REPO_ROOT/docs/schemas}"

pushd "$REPO_ROOT" > /dev/null

dotnet run --project src/Tools/PolicySchemaExporter -- "$OUTPUT_DIR"

popd > /dev/null
22
devops/tools/export-scripts/oci-verify.sh
Normal file
@@ -0,0 +1,22 @@
#!/usr/bin/env bash
set -euo pipefail

# Verify OCI distribution path works (push/pull loop).

IMAGE=${IMAGE:-"ghcr.io/stella-ops/exporter:edge"}
TMP="out/export-oci"
mkdir -p "$TMP"

echo "[export-oci] pulling $IMAGE"
docker pull "$IMAGE"

echo "[export-oci] retagging and pushing to local cache"
LOCAL="localhost:5001/exporter:test"
docker tag "$IMAGE" "$LOCAL"

docker push "$LOCAL" || echo "[export-oci] push skipped (no local registry?)"

echo "[export-oci] pulling back for verification"
docker pull "$LOCAL" || true

echo "[export-oci] done"
24
devops/tools/export-scripts/trivy-compat.sh
Normal file
@@ -0,0 +1,24 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-EXPORT-36-001: Trivy compatibility & signing checks

IMAGE=${IMAGE:-"ghcr.io/stella-ops/exporter:edge"}
OUT="out/export-compat"
mkdir -p "$OUT"

echo "[export-compat] pulling image $IMAGE"
docker pull "$IMAGE"

echo "[export-compat] running trivy image --severity HIGH,CRITICAL"
trivy image --severity HIGH,CRITICAL --quiet "$IMAGE" > "$OUT/trivy.txt" || true

echo "[export-compat] verifying cosign signature if present"
if command -v cosign >/dev/null 2>&1; then
  cosign verify "$IMAGE" > "$OUT/cosign.txt" || true
fi

echo "[export-compat] trivy module db import smoke"
trivy module db import --file "$OUT/trivy-module.db" 2>/dev/null || true

echo "[export-compat] done; outputs in $OUT"
Binary file not shown.
467
devops/tools/feeds/run_icscisa_kisa_refresh.py
Normal file
@@ -0,0 +1,467 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
ICS/KISA feed refresh runner.
|
||||
|
||||
Runs the SOP v0.2 workflow to emit NDJSON advisories, delta, fetch log, and hash
|
||||
manifest under out/feeds/icscisa-kisa/<YYYYMMDD>/.
|
||||
|
||||
Defaults to live fetch with offline-safe fallback to baked-in samples. You can
|
||||
force live/offline via env or CLI flags.
|
||||
"""
|
||||
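# Example invocations (illustrative; flags and env switches are the ones defined below):
#   python devops/tools/feeds/run_icscisa_kisa_refresh.py --offline
#   LIVE_FETCH=true python devops/tools/feeds/run_icscisa_kisa_refresh.py --out-dir out/feeds/icscisa-kisa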
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from html import unescape
|
||||
from pathlib import Path
|
||||
from typing import Dict, Iterable, List, Tuple
|
||||
from urllib.error import URLError, HTTPError
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
from urllib.request import Request, urlopen
|
||||
from xml.etree import ElementTree
|
||||
|
||||
|
||||
DEFAULT_OUTPUT_ROOT = Path("out/feeds/icscisa-kisa")
|
||||
DEFAULT_ICSCISA_URL = "https://www.cisa.gov/news-events/ics-advisories/icsa.xml"
|
||||
DEFAULT_KISA_URL = "https://knvd.krcert.or.kr/rss/securityInfo.do"
|
||||
DEFAULT_GATEWAY_HOST = "concelier-webservice"
|
||||
DEFAULT_GATEWAY_SCHEME = "http"
|
||||
USER_AGENT = "StellaOpsFeedRefresh/1.0 (+https://stella-ops.org)"
|
||||
|
||||
|
||||
def utcnow() -> dt.datetime:
|
||||
return dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc)
|
||||
|
||||
|
||||
def iso(ts: dt.datetime) -> str:
|
||||
return ts.strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
|
||||
|
||||
def sha256_bytes(data: bytes) -> str:
|
||||
return hashlib.sha256(data).hexdigest()
|
||||
|
||||
|
||||
def strip_html(value: str) -> str:
|
||||
return re.sub(r"<[^>]+>", "", value or "").strip()
|
||||
|
||||
|
||||
def safe_request(url: str) -> bytes:
|
||||
req = Request(url, headers={"User-Agent": USER_AGENT})
|
||||
with urlopen(req, timeout=30) as resp:
|
||||
return resp.read()
|
||||
|
||||
|
||||
def parse_rss_items(xml_bytes: bytes) -> Iterable[Dict[str, str]]:
|
||||
root = ElementTree.fromstring(xml_bytes)
|
||||
for item in root.findall(".//item"):
|
||||
title = (item.findtext("title") or "").strip()
|
||||
link = (item.findtext("link") or "").strip()
|
||||
description = strip_html(unescape(item.findtext("description") or ""))
|
||||
pub_date = (item.findtext("pubDate") or "").strip()
|
||||
yield {
|
||||
"title": title,
|
||||
"link": link,
|
||||
"description": description,
|
||||
"pub_date": pub_date,
|
||||
}
|
||||
|
||||
|
||||
def normalize_icscisa_record(item: Dict[str, str], fetched_at: str, run_id: str) -> Dict[str, object]:
|
||||
advisory_id = item["title"].split(":")[0].strip() or "icsa-unknown"
|
||||
summary = item["description"] or item["title"]
|
||||
raw_payload = f"{item['title']}\n{item['link']}\n{item['description']}"
|
||||
record = {
|
||||
"advisory_id": advisory_id,
|
||||
"source": "icscisa",
|
||||
"source_url": item["link"] or DEFAULT_ICSCISA_URL,
|
||||
"title": item["title"] or advisory_id,
|
||||
"summary": summary,
|
||||
"published": iso(parse_pubdate(item["pub_date"])),
|
||||
"updated": iso(parse_pubdate(item["pub_date"])),
|
||||
"severity": "unknown",
|
||||
"cvss": None,
|
||||
"cwe": [],
|
||||
"affected_products": [],
|
||||
"references": [url for url in (item["link"],) if url],
|
||||
"signature": {"status": "missing", "reason": "unsigned_source"},
|
||||
"fetched_at": fetched_at,
|
||||
"run_id": run_id,
|
||||
"payload_sha256": sha256_bytes(raw_payload.encode("utf-8")),
|
||||
}
|
||||
return record
|
||||
|
||||
|
||||
def normalize_kisa_record(item: Dict[str, str], fetched_at: str, run_id: str) -> Dict[str, object]:
|
||||
advisory_id = extract_kisa_id(item)
|
||||
raw_payload = f"{item['title']}\n{item['link']}\n{item['description']}"
|
||||
record = {
|
||||
"advisory_id": advisory_id,
|
||||
"source": "kisa",
|
||||
"source_url": item["link"] or DEFAULT_KISA_URL,
|
||||
"title": item["title"] or advisory_id,
|
||||
"summary": item["description"] or item["title"],
|
||||
"published": iso(parse_pubdate(item["pub_date"])),
|
||||
"updated": iso(parse_pubdate(item["pub_date"])),
|
||||
"severity": "unknown",
|
||||
"cvss": None,
|
||||
"cwe": [],
|
||||
"affected_products": [],
|
||||
"references": [url for url in (item["link"], DEFAULT_KISA_URL) if url],
|
||||
"signature": {"status": "missing", "reason": "unsigned_source"},
|
||||
"fetched_at": fetched_at,
|
||||
"run_id": run_id,
|
||||
"payload_sha256": sha256_bytes(raw_payload.encode("utf-8")),
|
||||
}
|
||||
return record
|
||||
|
||||
|
||||
def extract_kisa_id(item: Dict[str, str]) -> str:
|
||||
link = item["link"]
|
||||
match = re.search(r"IDX=([0-9]+)", link)
|
||||
if match:
|
||||
return f"KISA-{match.group(1)}"
|
||||
return (item["title"].split()[0] if item["title"] else "KISA-unknown").strip()
|
||||
|
||||
|
||||
def parse_pubdate(value: str) -> dt.datetime:
|
||||
if not value:
|
||||
return utcnow()
|
||||
try:
|
||||
# RFC1123-ish
|
||||
return dt.datetime.strptime(value, "%a, %d %b %Y %H:%M:%S %Z").replace(tzinfo=dt.timezone.utc)
|
||||
except ValueError:
|
||||
try:
|
||||
return dt.datetime.fromisoformat(value.replace("Z", "+00:00"))
|
||||
except ValueError:
|
||||
return utcnow()
|
||||
|
||||
|
||||
def sample_records() -> List[Dict[str, object]]:
|
||||
now_iso = iso(utcnow())
|
||||
return [
|
||||
{
|
||||
"advisory_id": "ICSA-25-123-01",
|
||||
"source": "icscisa",
|
||||
"source_url": "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01",
|
||||
"title": "Example ICS Advisory",
|
||||
"summary": "Example Corp ControlSuite RCE via exposed management service.",
|
||||
"published": "2025-10-13T12:00:00Z",
|
||||
"updated": "2025-11-30T00:00:00Z",
|
||||
"severity": "High",
|
||||
"cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", "score": 9.8},
|
||||
"cwe": ["CWE-269"],
|
||||
"affected_products": [{"vendor": "Example Corp", "product": "ControlSuite", "versions": ["4.2.0", "4.2.1"]}],
|
||||
"references": [
|
||||
"https://example.com/security/icsa-25-123-01.pdf",
|
||||
"https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01",
|
||||
],
|
||||
"signature": {"status": "missing", "reason": "unsigned_source"},
|
||||
"fetched_at": now_iso,
|
||||
"run_id": "",
|
||||
"payload_sha256": sha256_bytes(b"ICSA-25-123-01 Example ControlSuite advisory payload"),
|
||||
},
|
||||
{
|
||||
"advisory_id": "ICSMA-25-045-01",
|
||||
"source": "icscisa",
|
||||
"source_url": "https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01",
|
||||
"title": "Example Medical Advisory",
|
||||
"summary": "HealthTech infusion pump vulnerabilities including two CVEs.",
|
||||
"published": "2025-10-14T09:30:00Z",
|
||||
"updated": "2025-12-01T00:00:00Z",
|
||||
"severity": "Medium",
|
||||
"cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L", "score": 6.3},
|
||||
"cwe": ["CWE-319"],
|
||||
"affected_products": [{"vendor": "HealthTech", "product": "InfusionManager", "versions": ["2.1.0", "2.1.1"]}],
|
||||
"references": [
|
||||
"https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01",
|
||||
"https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2025-11111",
|
||||
],
|
||||
"signature": {"status": "missing", "reason": "unsigned_source"},
|
||||
"fetched_at": now_iso,
|
||||
"run_id": "",
|
||||
"payload_sha256": sha256_bytes(b"ICSMA-25-045-01 Example medical advisory payload"),
|
||||
},
|
||||
{
|
||||
"advisory_id": "KISA-2025-5859",
|
||||
"source": "kisa",
|
||||
"source_url": "https://knvd.krcert.or.kr/detailDos.do?IDX=5859",
|
||||
"title": "KISA sample advisory 5859",
|
||||
"summary": "Remote code execution in ControlBoard service (offline HTML snapshot).",
|
||||
"published": "2025-11-03T22:53:00Z",
|
||||
"updated": "2025-12-02T00:00:00Z",
|
||||
"severity": "High",
|
||||
"cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", "score": 9.8},
|
||||
"cwe": ["CWE-787"],
|
||||
"affected_products": [{"vendor": "ACME", "product": "ControlBoard", "versions": ["1.0.1.0084", "2.0.1.0034"]}],
|
||||
"references": [
|
||||
"https://knvd.krcert.or.kr/rss/securityInfo.do",
|
||||
"https://knvd.krcert.or.kr/detailDos.do?IDX=5859",
|
||||
],
|
||||
"signature": {"status": "missing", "reason": "unsigned_source"},
|
||||
"fetched_at": now_iso,
|
||||
"run_id": "",
|
||||
"payload_sha256": sha256_bytes(b"KISA advisory IDX 5859 cached HTML payload"),
|
||||
},
|
||||
{
|
||||
"advisory_id": "KISA-2025-5860",
|
||||
"source": "kisa",
|
||||
"source_url": "https://knvd.krcert.or.kr/detailDos.do?IDX=5860",
|
||||
"title": "KISA sample advisory 5860",
|
||||
"summary": "Authentication bypass via default credentials in NetGateway appliance.",
|
||||
"published": "2025-11-03T22:53:00Z",
|
||||
"updated": "2025-12-02T00:00:00Z",
|
||||
"severity": "Medium",
|
||||
"cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:L/I:L/A:L", "score": 7.3},
|
||||
"cwe": ["CWE-798"],
|
||||
"affected_products": [{"vendor": "NetGateway", "product": "Edge", "versions": ["3.4.2", "3.4.3"]}],
|
||||
"references": [
|
||||
"https://knvd.krcert.or.kr/rss/securityInfo.do",
|
||||
"https://knvd.krcert.or.kr/detailDos.do?IDX=5860",
|
||||
],
|
||||
"signature": {"status": "missing", "reason": "unsigned_source"},
|
||||
"fetched_at": now_iso,
|
||||
"run_id": "",
|
||||
"payload_sha256": sha256_bytes(b"KISA advisory IDX 5860 cached HTML payload"),
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def build_records(
|
||||
run_id: str,
|
||||
fetched_at: str,
|
||||
live_fetch: bool,
|
||||
offline_only: bool,
|
||||
icscisa_url: str,
|
||||
kisa_url: str,
|
||||
) -> Tuple[List[Dict[str, object]], Dict[str, str]]:
|
||||
samples = sample_records()
|
||||
sample_icscisa = [r for r in samples if r["source"] == "icscisa"]
|
||||
sample_kisa = [r for r in samples if r["source"] == "kisa"]
|
||||
status = {"icscisa": "offline", "kisa": "offline"}
|
||||
records: List[Dict[str, object]] = []
|
||||
|
||||
if live_fetch and not offline_only:
|
||||
try:
|
||||
icscisa_items = list(parse_rss_items(safe_request(icscisa_url)))
|
||||
for item in icscisa_items:
|
||||
records.append(normalize_icscisa_record(item, fetched_at, run_id))
|
||||
status["icscisa"] = f"live:{len(icscisa_items)}"
|
||||
except (URLError, HTTPError, ElementTree.ParseError, TimeoutError) as exc:
|
||||
print(f"[warn] ICS CISA fetch failed ({exc}); falling back to samples.", file=sys.stderr)
|
||||
|
||||
try:
|
||||
kisa_items = list(parse_rss_items(safe_request(kisa_url)))
|
||||
for item in kisa_items:
|
||||
records.append(normalize_kisa_record(item, fetched_at, run_id))
|
||||
status["kisa"] = f"live:{len(kisa_items)}"
|
||||
except (URLError, HTTPError, ElementTree.ParseError, TimeoutError) as exc:
|
||||
print(f"[warn] KISA fetch failed ({exc}); falling back to samples.", file=sys.stderr)
|
||||
|
||||
if not records or not status["icscisa"].startswith("live"):
|
||||
records.extend(apply_run_metadata(sample_icscisa, run_id, fetched_at))
|
||||
status["icscisa"] = status.get("icscisa") or "offline"
|
||||
|
||||
if not any(r["source"] == "kisa" for r in records):
|
||||
records.extend(apply_run_metadata(sample_kisa, run_id, fetched_at))
|
||||
status["kisa"] = status.get("kisa") or "offline"
|
||||
|
||||
return records, status
|
||||
|
||||
|
||||
def apply_run_metadata(records: Iterable[Dict[str, object]], run_id: str, fetched_at: str) -> List[Dict[str, object]]:
|
||||
updated = []
|
||||
for record in records:
|
||||
copy = dict(record)
|
||||
copy["run_id"] = run_id
|
||||
copy["fetched_at"] = fetched_at
|
||||
copy["payload_sha256"] = record.get("payload_sha256") or sha256_bytes(json.dumps(record, sort_keys=True).encode("utf-8"))
|
||||
updated.append(copy)
|
||||
return updated
|
||||
|
||||
|
||||
def find_previous_snapshot(base_dir: Path, current_run_date: str) -> Path | None:
|
||||
if not base_dir.exists():
|
||||
return None
|
||||
candidates = sorted(p for p in base_dir.iterdir() if p.is_dir() and p.name != current_run_date)
|
||||
if not candidates:
|
||||
return None
|
||||
return candidates[-1] / "advisories.ndjson"
|
||||
|
||||
|
||||
def load_previous_hash(path: Path | None) -> str | None:
|
||||
if path and path.exists():
|
||||
return sha256_bytes(path.read_bytes())
|
||||
return None
|
||||
|
||||
|
||||
def compute_delta(new_records: List[Dict[str, object]], previous_path: Path | None) -> Dict[str, object]:
|
||||
prev_records = {}
|
||||
if previous_path and previous_path.exists():
|
||||
with previous_path.open("r", encoding="utf-8") as handle:
|
||||
for line in handle:
|
||||
if line.strip():
|
||||
rec = json.loads(line)
|
||||
prev_records[rec["advisory_id"]] = rec
|
||||
|
||||
new_by_id = {r["advisory_id"]: r for r in new_records}
|
||||
added = [rid for rid in new_by_id if rid not in prev_records]
|
||||
updated = [
|
||||
rid
|
||||
for rid, rec in new_by_id.items()
|
||||
if rid in prev_records and rec.get("payload_sha256") != prev_records[rid].get("payload_sha256")
|
||||
]
|
||||
removed = [rid for rid in prev_records if rid not in new_by_id]
|
||||
|
||||
return {
|
||||
"added": {"icscisa": [rid for rid in added if new_by_id[rid]["source"] == "icscisa"],
|
||||
"kisa": [rid for rid in added if new_by_id[rid]["source"] == "kisa"]},
|
||||
"updated": {"icscisa": [rid for rid in updated if new_by_id[rid]["source"] == "icscisa"],
|
||||
"kisa": [rid for rid in updated if new_by_id[rid]["source"] == "kisa"]},
|
||||
"removed": {"icscisa": [rid for rid in removed if prev_records[rid]["source"] == "icscisa"],
|
||||
"kisa": [rid for rid in removed if prev_records[rid]["source"] == "kisa"]},
|
||||
"totals": {
|
||||
"icscisa": {
|
||||
"added": len([rid for rid in added if new_by_id[rid]["source"] == "icscisa"]),
|
||||
"updated": len([rid for rid in updated if new_by_id[rid]["source"] == "icscisa"]),
|
||||
"removed": len([rid for rid in removed if prev_records[rid]["source"] == "icscisa"]),
|
||||
"remaining": len([rid for rid, rec in new_by_id.items() if rec["source"] == "icscisa"]),
|
||||
},
|
||||
"kisa": {
|
||||
"added": len([rid for rid in added if new_by_id[rid]["source"] == "kisa"]),
|
||||
"updated": len([rid for rid in updated if new_by_id[rid]["source"] == "kisa"]),
|
||||
"removed": len([rid for rid in removed if prev_records[rid]["source"] == "kisa"]),
|
||||
"remaining": len([rid for rid, rec in new_by_id.items() if rec["source"] == "kisa"]),
|
||||
},
|
||||
"overall": len(new_records),
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def write_ndjson(records: List[Dict[str, object]], path: Path) -> None:
|
||||
path.write_text("\n".join(json.dumps(r, sort_keys=True, separators=(",", ":")) for r in records) + "\n", encoding="utf-8")
|
||||
|
||||
|
||||
def write_fetch_log(
|
||||
path: Path,
|
||||
run_id: str,
|
||||
start: str,
|
||||
end: str,
|
||||
status: Dict[str, str],
|
||||
gateway_host: str,
|
||||
gateway_scheme: str,
|
||||
icscisa_url: str,
|
||||
kisa_url: str,
|
||||
live_fetch: bool,
|
||||
offline_only: bool,
|
||||
) -> None:
|
||||
lines = [
|
||||
f"run_id={run_id} start={start} end={end}",
|
||||
f"sources=icscisa,kisa cadence=weekly backlog_window=60d live_fetch={str(live_fetch).lower()} offline_only={str(offline_only).lower()}",
|
||||
f"gateway={gateway_scheme}://{gateway_host}",
|
||||
f"icscisa_url={icscisa_url} status={status.get('icscisa','offline')} retries=0",
|
||||
f"kisa_url={kisa_url} status={status.get('kisa','offline')} retries=0",
|
||||
"outputs=advisories.ndjson,delta.json,hashes.sha256",
|
||||
]
|
||||
path.write_text("\n".join(lines) + "\n", encoding="utf-8")
|
||||
|
||||
|
||||
def write_hashes(dir_path: Path) -> None:
|
||||
entries = []
|
||||
for name in ["advisories.ndjson", "delta.json", "fetch.log"]:
|
||||
file_path = dir_path / name
|
||||
entries.append(f"{sha256_bytes(file_path.read_bytes())} {name}")
|
||||
(dir_path / "hashes.sha256").write_text("\n".join(entries) + "\n", encoding="utf-8")
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(description="Run ICS/KISA feed refresh SOP v0.2")
|
||||
parser.add_argument("--out-dir", default=str(DEFAULT_OUTPUT_ROOT), help="Base output directory (default: out/feeds/icscisa-kisa)")
|
||||
parser.add_argument("--run-date", default=None, help="Override run date (YYYYMMDD)")
|
||||
parser.add_argument("--run-id", default=None, help="Override run id")
|
||||
parser.add_argument("--live", action="store_true", default=False, help="Force live fetch (default: enabled via env LIVE_FETCH=true)")
|
||||
parser.add_argument("--offline", action="store_true", default=False, help="Force offline samples only")
|
||||
args = parser.parse_args()
|
||||
|
||||
now = utcnow()
|
||||
run_date = args.run_date or now.strftime("%Y%m%d")
|
||||
run_id = args.run_id or f"icscisa-kisa-{now.strftime('%Y%m%dT%H%M%SZ')}"
|
||||
fetched_at = iso(now)
|
||||
start = fetched_at
|
||||
|
||||
live_fetch = args.live or os.getenv("LIVE_FETCH", "true").lower() == "true"
|
||||
offline_only = args.offline or os.getenv("OFFLINE_SNAPSHOT", "false").lower() == "true"
|
||||
|
||||
output_root = Path(args.out_dir)
|
||||
output_dir = output_root / run_date
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
previous_path = find_previous_snapshot(output_root, run_date)
|
||||
|
||||
gateway_host = os.getenv("FEED_GATEWAY_HOST", DEFAULT_GATEWAY_HOST)
|
||||
gateway_scheme = os.getenv("FEED_GATEWAY_SCHEME", DEFAULT_GATEWAY_SCHEME)
|
||||
|
||||
def resolve_feed(url_env: str | None, default_url: str) -> str:
|
||||
if url_env:
|
||||
return url_env
|
||||
parsed = urlparse(default_url)
|
||||
# Replace host/scheme to allow on-prem DNS (docker network) defaults.
|
||||
rewritten = parsed._replace(netloc=gateway_host, scheme=gateway_scheme)
|
||||
return urlunparse(rewritten)
|
||||
|
||||
resolved_icscisa_url = resolve_feed(os.getenv("ICSCISA_FEED_URL"), DEFAULT_ICSCISA_URL)
|
||||
resolved_kisa_url = resolve_feed(os.getenv("KISA_FEED_URL"), DEFAULT_KISA_URL)
|
||||
|
||||
records, status = build_records(
|
||||
run_id=run_id,
|
||||
fetched_at=fetched_at,
|
||||
live_fetch=live_fetch,
|
||||
offline_only=offline_only,
|
||||
icscisa_url=resolved_icscisa_url,
|
||||
kisa_url=resolved_kisa_url,
|
||||
)
|
||||
|
||||
write_ndjson(records, output_dir / "advisories.ndjson")
|
||||
|
||||
delta = compute_delta(records, previous_path)
|
||||
delta_payload = {
|
||||
"run_id": run_id,
|
||||
"generated_at": iso(utcnow()),
|
||||
**delta,
|
||||
"previous_snapshot_sha256": load_previous_hash(previous_path),
|
||||
}
|
||||
(output_dir / "delta.json").write_text(json.dumps(delta_payload, separators=(",", ":")) + "\n", encoding="utf-8")
|
||||
|
||||
end = iso(utcnow())
|
||||
write_fetch_log(
|
||||
output_dir / "fetch.log",
|
||||
run_id,
|
||||
start,
|
||||
end,
|
||||
status,
|
||||
gateway_host=gateway_host,
|
||||
gateway_scheme=gateway_scheme,
|
||||
icscisa_url=resolved_icscisa_url,
|
||||
kisa_url=resolved_kisa_url,
|
||||
live_fetch=live_fetch and not offline_only,
|
||||
offline_only=offline_only,
|
||||
)
|
||||
write_hashes(output_dir)
|
||||
|
||||
print(f"[ok] wrote {len(records)} advisories to {output_dir}")
|
||||
print(f" run_id={run_id} live_fetch={live_fetch and not offline_only} offline_only={offline_only}")
|
||||
print(f" gateway={gateway_scheme}://{gateway_host}")
|
||||
print(f" icscisa_url={resolved_icscisa_url}")
|
||||
print(f" kisa_url={resolved_kisa_url}")
|
||||
print(f" status={status}")
|
||||
if previous_path:
|
||||
print(f" previous_snapshot={previous_path}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
38
devops/tools/fetch-ics-cisa-seed.ps1
Normal file
@@ -0,0 +1,38 @@
param(
    [string]$Destination = "$(Join-Path (Split-Path -Parent $PSCommandPath) '..' | Resolve-Path)/seed-data/ics-cisa"
)

$ErrorActionPreference = 'Stop'
New-Item -Path $Destination -ItemType Directory -Force | Out-Null

Function Write-Info($Message) { Write-Host "[ics-seed] $Message" }
Function Write-ErrorLine($Message) { Write-Host "[ics-seed][error] $Message" -ForegroundColor Red }

Function Download-File($Url, $Path) {
    Write-Info "Downloading $(Split-Path $Path -Leaf)"
    Invoke-WebRequest -Uri $Url -OutFile $Path -UseBasicParsing
    $hash = Get-FileHash -Path $Path -Algorithm SHA256
    $hash.Hash | Out-File -FilePath "$Path.sha256" -Encoding ascii
}

$base = 'https://raw.githubusercontent.com/icsadvprj/ICS-Advisory-Project/main/ICS-CERT_ADV'
$master = 'CISA_ICS_ADV_Master.csv'
$snapshot = 'CISA_ICS_ADV_2025_10_09.csv'

Write-Info 'Fetching ICS advisories seed data (ODbL v1.0)'
Download-File "$base/$master" (Join-Path $Destination $master)
Download-File "$base/$snapshot" (Join-Path $Destination $snapshot)

$medicalUrl = 'https://raw.githubusercontent.com/batarr22/ICSMA_CSV/main/ICSMA_CSV_4-20-2023.xlsx'
$medicalFile = 'ICSMA_CSV_4-20-2023.xlsx'
Write-Info 'Fetching community ICSMA snapshot'
try {
    Download-File $medicalUrl (Join-Path $Destination $medicalFile)
}
catch {
    Write-ErrorLine "Unable to download $medicalFile (optional): $_"
    Remove-Item (Join-Path $Destination $medicalFile) -ErrorAction SilentlyContinue
}

Write-Info "Seed data ready in $Destination"
Write-Info 'Remember: data is licensed under ODbL v1.0 (see seed README).'
38
devops/tools/fetch-ics-cisa-seed.sh
Normal file
@@ -0,0 +1,38 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
DEST_DIR="${1:-$ROOT_DIR/seed-data/ics-cisa}"
mkdir -p "$DEST_DIR"

info() { printf "[ics-seed] %s\n" "$*"; }
error() { printf "[ics-seed][error] %s\n" "$*" >&2; }

download() {
  local url="$1"
  local target="$2"
  info "Downloading $(basename "$target")"
  curl -fL "$url" -o "$target"
  sha256sum "$target" > "$target.sha256"
}

BASE="https://raw.githubusercontent.com/icsadvprj/ICS-Advisory-Project/main/ICS-CERT_ADV"
MASTER_FILE="CISA_ICS_ADV_Master.csv"
SNAPSHOT_2025="CISA_ICS_ADV_2025_10_09.csv"

info "Fetching ICS advisories seed data (ODbL v1.0)"
download "$BASE/$MASTER_FILE" "$DEST_DIR/$MASTER_FILE"
download "$BASE/$SNAPSHOT_2025" "$DEST_DIR/$SNAPSHOT_2025"

MEDICAL_URL="https://raw.githubusercontent.com/batarr22/ICSMA_CSV/main/ICSMA_CSV_4-20-2023.xlsx"
MEDICAL_FILE="ICSMA_CSV_4-20-2023.xlsx"
info "Fetching community ICSMA snapshot"
if curl -fL "$MEDICAL_URL" -o "$DEST_DIR/$MEDICAL_FILE"; then
  sha256sum "$DEST_DIR/$MEDICAL_FILE" > "$DEST_DIR/$MEDICAL_FILE.sha256"
else
  error "Unable to download $MEDICAL_FILE (optional)."
  rm -f "$DEST_DIR/$MEDICAL_FILE"
fi

info "Seed data ready in $DEST_DIR"
info "Remember: data is licensed under ODbL v1.0 (see seed README)."
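A minimal invocation sketch for the seed fetcher above (the destination directory is an illustrative override of the default):
  bash devops/tools/fetch-ics-cisa-seed.sh /tmp/ics-cisa-seed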
47
devops/tools/graph/load-test.sh
Normal file
@@ -0,0 +1,47 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-GRAPH-24-001: load test graph index/adjacency APIs

TARGET=${TARGET:-"http://localhost:5000"}
OUT="out/graph-load"
mkdir -p "$OUT"

USERS=${USERS:-8}
DURATION=${DURATION:-60}
RATE=${RATE:-200}

cat > "${OUT}/k6-graph.js" <<'EOF'
import http from 'k6/http';
import { sleep } from 'k6';

export const options = {
  vus: __USERS__,
  duration: '__DURATION__s',
  thresholds: {
    http_req_duration: ['p(95)<500'],
    http_req_failed: ['rate<0.01'],
  },
};

const targets = [
  '/graph/api/index',
  '/graph/api/adjacency?limit=100',
  '/graph/api/search?q=log4j',
];

export default function () {
  const host = __TARGET__;
  targets.forEach(path => http.get(`${host}${path}`));
  sleep(1);
}
EOF

sed -i "s/__USERS__/${USERS}/g" "${OUT}/k6-graph.js"
sed -i "s/__DURATION__/${DURATION}/g" "${OUT}/k6-graph.js"
sed -i "s@__TARGET__@\"${TARGET}\"@g" "${OUT}/k6-graph.js"

echo "[graph-load] running k6..."
k6 run "${OUT}/k6-graph.js" --summary-export "${OUT}/summary.json" --http-debug="off"

echo "[graph-load] summary written to ${OUT}/summary.json"
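A minimal usage sketch for the load test above (requires k6 on PATH; the target URL and load shape are illustrative):
  TARGET=http://localhost:5000 USERS=16 DURATION=120 bash devops/tools/graph/load-test.sh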
21
devops/tools/graph/simulation-smoke.sh
Normal file
@@ -0,0 +1,21 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-GRAPH-24-003: simulation endpoint smoke

TARGET=${TARGET:-"http://localhost:5000"}
OUT="out/graph-sim"
mkdir -p "$OUT"

echo "[graph-sim] hitting simulation endpoints"

curl -sSf "${TARGET}/graph/api/simulation/ping" > "${OUT}/ping.json"
curl -sSf "${TARGET}/graph/api/simulation/run?limit=5" > "${OUT}/run.json"

cat > "${OUT}/summary.txt" <<EOF
ping: $(jq -r '.status' "${OUT}/ping.json" 2>/dev/null || echo "unknown")
run_len: $(jq '. | length' "${OUT}/run.json" 2>/dev/null || echo "0")
EOF

echo "[graph-sim] completed; summary:"
cat "${OUT}/summary.txt"
30
devops/tools/graph/ui-perf.ts
Normal file
@@ -0,0 +1,30 @@
import { chromium } from 'playwright';
import fs from 'fs';

const BASE_URL = process.env.GRAPH_UI_BASE ?? 'http://localhost:4200';
const OUT = process.env.OUT ?? 'out/graph-ui-perf';
const BUDGET_MS = Number(process.env.GRAPH_UI_BUDGET_MS ?? '3000');

(async () => {
  fs.mkdirSync(OUT, { recursive: true });
  const browser = await chromium.launch({ headless: true });
  const page = await browser.newPage();

  const start = Date.now();
  await page.goto(`${BASE_URL}/graph`, { waitUntil: 'networkidle' });
  await page.click('text=Explore'); // assumes nav element
  await page.waitForSelector('canvas');
  const duration = Date.now() - start;

  const metrics = await page.evaluate(() => JSON.stringify(window.performance.timing));
  fs.writeFileSync(`${OUT}/timing.json`, metrics);
  fs.writeFileSync(`${OUT}/duration.txt`, `${duration}`);

  if (duration > BUDGET_MS) {
    console.error(`[graph-ui] perf budget exceeded: ${duration}ms > ${BUDGET_MS}ms`);
    process.exit(1);
  }

  await browser.close();
  console.log(`[graph-ui] load duration ${duration}ms (budget ${BUDGET_MS}ms)`);
})();
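A minimal usage sketch for the UI perf probe above (the tsx runner is an assumption; any TypeScript-capable Node runner works):
  GRAPH_UI_BASE=http://localhost:4200 GRAPH_UI_BUDGET_MS=3000 npx tsx devops/tools/graph/ui-perf.ts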
75
devops/tools/kisa_capture_html.py
Normal file
75
devops/tools/kisa_capture_html.py
Normal file
@@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Download KISA/KNVD advisory HTML pages for offline analysis."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import sys
|
||||
import xml.etree.ElementTree as ET
|
||||
from pathlib import Path
|
||||
from urllib.error import HTTPError, URLError
|
||||
from urllib.parse import parse_qs, urlsplit
|
||||
from urllib.request import Request, urlopen
|
||||
|
||||
FEED_URL = "https://knvd.krcert.or.kr/rss/securityInfo.do"
|
||||
USER_AGENT = "Mozilla/5.0 (compatible; StellaOpsOffline/1.0)"
|
||||
|
||||
|
||||
def fetch(url: str) -> bytes:
|
||||
req = Request(url, headers={"User-Agent": USER_AGENT})
|
||||
with urlopen(req, timeout=15) as resp:
|
||||
return resp.read()
|
||||
|
||||
|
||||
def iter_idxs(feed_xml: bytes) -> list[tuple[str, str]]:
|
||||
root = ET.fromstring(feed_xml)
|
||||
items = []
|
||||
for item in root.findall(".//item"):
|
||||
title = (item.findtext("title") or "").strip()
|
||||
link = item.findtext("link") or ""
|
||||
idx = parse_qs(urlsplit(link).query).get("IDX", [None])[0]
|
||||
if idx:
|
||||
items.append((idx, title))
|
||||
return items
|
||||
|
||||
|
||||
def capture(idx: str, title: str, out_dir: Path) -> Path:
|
||||
url = f"https://knvd.krcert.or.kr/detailDos.do?IDX={idx}"
|
||||
html = fetch(url)
|
||||
target = out_dir / f"{idx}.html"
|
||||
target.write_bytes(html)
|
||||
print(f"saved {target} ({title})")
|
||||
return target
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--out", type=Path, default=Path("seed-data/kisa/html"))
|
||||
parser.add_argument("--limit", type=int, default=10, help="Maximum advisories to download")
|
||||
args = parser.parse_args()
|
||||
|
||||
args.out.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
print(f"[{dt.datetime.utcnow():%Y-%m-%d %H:%M:%S}Z] fetching RSS feed…")
|
||||
try:
|
||||
feed = fetch(FEED_URL)
|
||||
except (URLError, HTTPError) as exc:
|
||||
print("RSS fetch failed:", exc, file=sys.stderr)
|
||||
return 1
|
||||
|
||||
items = iter_idxs(feed)[: args.limit]
|
||||
if not items:
|
||||
print("No advisories found in feed", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
for idx, title in items:
|
||||
try:
|
||||
capture(idx, title, args.out)
|
||||
except (URLError, HTTPError) as exc:
|
||||
print(f"failed {idx}: {exc}", file=sys.stderr)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
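For reference, a typical capture run of the script above looks like this; the proxy example is illustrative only, for hosts without direct access to knvd.krcert.or.kr:

```bash
# Grab the five most recent KNVD advisories into the seed-data tree.
python3 devops/tools/kisa_capture_html.py --limit 5 --out seed-data/kisa/html

# urllib honours the standard proxy variables, so an egress proxy can be injected per run.
HTTPS_PROXY=http://proxy.internal:3128 python3 devops/tools/kisa_capture_html.py --limit 5
```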
24
devops/tools/linksets-ci.sh
Normal file
24
devops/tools/linksets-ci.sh
Normal file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env bash
|
||||
# CI runner profile for Concelier /linksets tests without harness workdir injection.
|
||||
set -euo pipefail
|
||||
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
PROJECT="$ROOT_DIR/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj"
|
||||
DOTNET_EXE=$(command -v dotnet)
DOTNET_EXE=$(command -v dotnet || true)
|
||||
if [[ -z "$DOTNET_EXE" ]]; then
|
||||
echo "dotnet not found" >&2; exit 1; fi
|
||||
export VSTEST_DISABLE_APPDOMAIN=1
|
||||
export DOTNET_CLI_UI_LANGUAGE=en
|
||||
export DOTNET_CLI_TELEMETRY_OPTOUT=1
|
||||
# Prefer the curated offline feed to avoid network flakiness during CI.
|
||||
export NUGET_PACKAGES="${ROOT_DIR}/.nuget/packages"
|
||||
RESTORE_SOURCE="--source ${ROOT_DIR}/.nuget/packages --ignore-failed-sources"
|
||||
# Ensure Mongo2Go can find OpenSSL 1.1 (needed by bundled mongod)
|
||||
OPENSSL11_DIR="$ROOT_DIR/tools/openssl1.1/lib"
|
||||
if [[ -d "$OPENSSL11_DIR" ]]; then
|
||||
export LD_LIBRARY_PATH="$OPENSSL11_DIR:${LD_LIBRARY_PATH:-}"
|
||||
fi
|
||||
RESULTS_DIR="$ROOT_DIR/out/test-results/linksets"
|
||||
mkdir -p "$RESULTS_DIR"
|
||||
# Restore explicitly against offline cache, then run tests without restoring again.
|
||||
"$ROOT_DIR/tools/dotnet-filter.sh" restore "$PROJECT" $RESTORE_SOURCE
|
||||
exec "$ROOT_DIR/tools/dotnet-filter.sh" test "$PROJECT" --no-restore --filter "Linksets" --results-directory "$RESULTS_DIR" --logger "trx;LogFileName=linksets.trx"
|
||||
7
devops/tools/lint/implementor-guidelines.sh
Normal file
7
devops/tools/lint/implementor-guidelines.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Stub lint: enforce docs tag placeholder until full checks land.
|
||||
if git diff --cached --name-only | grep -q '^docs/'; then
|
||||
echo "[stub] docs touched: ensure commit includes 'docs:' trailer (value or 'n/a')"
|
||||
fi
|
||||
57
devops/tools/lnm/alerts/lnm-alerts.yaml
Normal file
57
devops/tools/lnm/alerts/lnm-alerts.yaml
Normal file
@@ -0,0 +1,57 @@
|
||||
# LNM Migration Alert Rules
|
||||
# Prometheus alerting rules for linkset/advisory migrations
|
||||
|
||||
groups:
|
||||
- name: lnm-migration
|
||||
rules:
|
||||
- alert: LnmMigrationErrorRate
|
||||
expr: rate(lnm_migration_errors_total[5m]) > 0.1
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
team: concelier
|
||||
annotations:
|
||||
summary: "LNM migration error rate elevated"
|
||||
description: "Migration errors: {{ $value | printf \"%.2f\" }}/s"
|
||||
|
||||
- alert: LnmBackfillStalled
|
||||
expr: increase(lnm_backfill_processed_total[10m]) == 0 and lnm_backfill_running == 1
|
||||
for: 10m
|
||||
labels:
|
||||
severity: critical
|
||||
team: concelier
|
||||
annotations:
|
||||
summary: "LNM backfill stalled"
|
||||
description: "No progress in 10 minutes while backfill is running"
|
||||
|
||||
- alert: LnmLinksetCountMismatch
|
||||
expr: abs(lnm_linksets_total - lnm_linksets_expected) > 100
|
||||
for: 15m
|
||||
labels:
|
||||
severity: warning
|
||||
team: concelier
|
||||
annotations:
|
||||
summary: "Linkset count mismatch"
|
||||
description: "Expected {{ $labels.expected }}, got {{ $value }}"
|
||||
|
||||
- alert: LnmObservationsBacklogHigh
|
||||
expr: lnm_observations_backlog > 10000
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
team: excititor
|
||||
annotations:
|
||||
summary: "Advisory observations backlog high"
|
||||
description: "Backlog: {{ $value }} items"
|
||||
|
||||
- name: lnm-sla
|
||||
rules:
|
||||
- alert: LnmIngestToApiLatencyHigh
|
||||
expr: histogram_quantile(0.95, rate(lnm_ingest_to_api_latency_seconds_bucket[5m])) > 30
|
||||
for: 10m
|
||||
labels:
|
||||
severity: warning
|
||||
team: platform
|
||||
annotations:
|
||||
summary: "Ingest to API latency exceeds SLA"
|
||||
description: "P95 latency: {{ $value | printf \"%.1f\" }}s (SLA: 30s)"
|
||||
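Before loading the rules above into Prometheus, a syntax pass with `promtool` catches indentation and expression mistakes early (path as added in this commit):

```bash
# Validate the LNM alert rules (promtool ships with Prometheus).
promtool check rules devops/tools/lnm/alerts/lnm-alerts.yaml

# Optional: evaluate one of the expressions against a running Prometheus instance.
promtool query instant http://localhost:9090 'rate(lnm_migration_errors_total[5m])'
```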
32
devops/tools/lnm/backfill-plan.md
Normal file
32
devops/tools/lnm/backfill-plan.md
Normal file
@@ -0,0 +1,32 @@
|
||||
# LNM Backfill Plan (DEVOPS-LNM-22-001)
|
||||
|
||||
## Goal
|
||||
Run staging backfill for advisory observations/linksets, validate counts/conflicts, and document rollout steps for production.
|
||||
|
||||
## Prereqs
|
||||
- Concelier API CCLN0102 available (advisory/linkset endpoints stable).
|
||||
- Staging Mongo snapshot taken (pre-backfill) and stored at `s3://staging-backups/concelier-pre-lnmbf.gz`.
|
||||
- NATS/Redis staging brokers reachable.
|
||||
|
||||
## Steps
|
||||
1) Seed snapshot
|
||||
- Restore staging Mongo from pre-backfill snapshot.
|
||||
2) Run backfill job
|
||||
- `dotnet run --project src/Concelier/StellaOps.Concelier.Backfill -- --mode=observations --batch-size=500 --max-conflicts=0`
|
||||
- `dotnet run --project src/Concelier/StellaOps.Concelier.Backfill -- --mode=linksets --batch-size=500 --max-conflicts=0`
|
||||
3) Validate counts
|
||||
- Compare `advisory_observations_total` and `linksets_total` vs expected inventory; export to `.artifacts/lnm-counts.json`.
|
||||
- Check conflict log `.artifacts/lnm-conflicts.ndjson` (must be empty).
|
||||
4) Events/NATS smoke
|
||||
- Ensure `concelier.lnm.backfill.completed` emitted; verify Redis/NATS queues drained.
|
||||
5) Roll-forward checklist
|
||||
- Promote batch size to 2000 for prod, keep `--max-conflicts=0`.
|
||||
- Schedule maintenance window, ensure snapshot available for rollback.
|
||||
|
||||
## Outputs
|
||||
- `.artifacts/lnm-counts.json`
|
||||
- `.artifacts/lnm-conflicts.ndjson` (empty)
|
||||
- Log of job runtime + throughput.
|
||||
|
||||
## Acceptance
|
||||
- Zero conflicts; counts match expected; events emitted; rollback plan documented.
|
||||
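Step 3 above compares exported counts against the expected inventory; a minimal sketch of that comparison follows. The `lnm-expected.json` file name and its shape are assumptions for illustration, mirroring the `lnm-counts.json` that `backfill-validation.sh` writes.

```bash
# Fail the run if backfill counts drift from the expected inventory.
expected=.artifacts/lnm-expected.json   # assumed shape: {"observations": N, "linksets": N}
actual=.artifacts/lnm-counts.json

for key in observations linksets; do
  want=$(jq -r ".${key}" "$expected")
  got=$(jq -r ".${key}" "$actual")
  if [[ "$want" != "$got" ]]; then
    echo "count mismatch for ${key}: expected ${want}, got ${got}" >&2
    exit 1
  fi
done
echo "counts match expected inventory"
```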
24
devops/tools/lnm/backfill-validation.sh
Normal file
24
devops/tools/lnm/backfill-validation.sh
Normal file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
ROOT=${ROOT:-$(cd "$(dirname "$0")/../.." && pwd)}
|
||||
ARTIFACTS=${ARTIFACTS:-$ROOT/.artifacts}
COUNTS=$ARTIFACTS/lnm-counts.json
CONFLICTS=$ARTIFACTS/lnm-conflicts.ndjson
mkdir -p "$ARTIFACTS"

mongoexport --uri "${STAGING_MONGO_URI:?set STAGING_MONGO_URI}" --collection advisoryObservations --db concelier --type=json --jsonArray --query '{}' --out "$ARTIFACTS/obs.json" >/dev/null
mongoexport --uri "${STAGING_MONGO_URI:?set STAGING_MONGO_URI}" --collection linksets --db concelier --type=json --jsonArray --query '{}' --out "$ARTIFACTS/linksets.json" >/dev/null

OBS=$(jq length "$ARTIFACTS/obs.json")
LNK=$(jq length "$ARTIFACTS/linksets.json")
|
||||
|
||||
cat > "$COUNTS" <<JSON
|
||||
{
|
||||
"observations": $OBS,
|
||||
"linksets": $LNK,
|
||||
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
|
||||
}
|
||||
JSON
|
||||
|
||||
touch "$CONFLICTS"
|
||||
echo "Counts written to $COUNTS; conflicts at $CONFLICTS"
|
||||
51
devops/tools/lnm/dashboards/lnm-migration.json
Normal file
51
devops/tools/lnm/dashboards/lnm-migration.json
Normal file
@@ -0,0 +1,51 @@
|
||||
{
|
||||
"dashboard": {
|
||||
"title": "LNM Migration Dashboard",
|
||||
"uid": "lnm-migration",
|
||||
"tags": ["lnm", "migration", "concelier", "excititor"],
|
||||
"timezone": "utc",
|
||||
"refresh": "30s",
|
||||
"panels": [
|
||||
{
|
||||
"title": "Migration Progress",
|
||||
"type": "stat",
|
||||
"gridPos": {"x": 0, "y": 0, "w": 6, "h": 4},
|
||||
"targets": [
|
||||
{"expr": "lnm_backfill_processed_total", "legendFormat": "Processed"}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Error Rate",
|
||||
"type": "graph",
|
||||
"gridPos": {"x": 6, "y": 0, "w": 12, "h": 4},
|
||||
"targets": [
|
||||
{"expr": "rate(lnm_migration_errors_total[5m])", "legendFormat": "Errors/s"}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Linksets Total",
|
||||
"type": "stat",
|
||||
"gridPos": {"x": 18, "y": 0, "w": 6, "h": 4},
|
||||
"targets": [
|
||||
{"expr": "lnm_linksets_total", "legendFormat": "Total"}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Observations Backlog",
|
||||
"type": "graph",
|
||||
"gridPos": {"x": 0, "y": 4, "w": 12, "h": 6},
|
||||
"targets": [
|
||||
{"expr": "lnm_observations_backlog", "legendFormat": "Backlog"}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "Ingest to API Latency (P95)",
|
||||
"type": "graph",
|
||||
"gridPos": {"x": 12, "y": 4, "w": 12, "h": 6},
|
||||
"targets": [
|
||||
{"expr": "histogram_quantile(0.95, rate(lnm_ingest_to_api_latency_seconds_bucket[5m]))", "legendFormat": "P95"}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
11
devops/tools/lnm/metrics-ci-check.sh
Normal file
11
devops/tools/lnm/metrics-ci-check.sh
Normal file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
DASHBOARD=${1:-ops/devops/lnm/metrics-dashboard.json}
|
||||
jq . "$DASHBOARD" >/dev/null
|
||||
REQUIRED=("advisory_observations_total" "linksets_total" "ingest_api_latency_seconds_bucket" "lnm_backfill_processed_total")
|
||||
for metric in "${REQUIRED[@]}"; do
|
||||
if ! grep -q "$metric" "$DASHBOARD"; then
|
||||
echo "::error::metric $metric missing from dashboard"; exit 1
|
||||
fi
|
||||
done
|
||||
echo "dashboard metrics present"
|
||||
9
devops/tools/lnm/metrics-dashboard.json
Normal file
9
devops/tools/lnm/metrics-dashboard.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"title": "LNM Backfill Metrics",
|
||||
"panels": [
|
||||
{"type": "stat", "title": "Observations", "targets": [{"expr": "advisory_observations_total"}]},
|
||||
{"type": "stat", "title": "Linksets", "targets": [{"expr": "linksets_total"}]},
|
||||
{"type": "graph", "title": "Ingest→API latency p95", "targets": [{"expr": "histogram_quantile(0.95, rate(ingest_api_latency_seconds_bucket[5m]))"}]},
|
||||
{"type": "graph", "title": "Backfill throughput", "targets": [{"expr": "rate(lnm_backfill_processed_total[5m])"}]}
|
||||
]
|
||||
}
|
||||
92
devops/tools/lnm/package-runner.sh
Normal file
92
devops/tools/lnm/package-runner.sh
Normal file
@@ -0,0 +1,92 @@
|
||||
#!/usr/bin/env bash
|
||||
# Package LNM migration runner for release/offline kit
|
||||
# Usage: ./package-runner.sh
|
||||
# Dev mode: COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev ./package-runner.sh
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
ROOT=$(cd "$(dirname "$0")/../../.." && pwd)
|
||||
OUT_DIR="${OUT_DIR:-$ROOT/out/lnm}"
|
||||
CREATED="${CREATED:-$(date -u +%Y-%m-%dT%H:%M:%SZ)}"
|
||||
|
||||
mkdir -p "$OUT_DIR/runner"
|
||||
|
||||
echo "==> LNM Migration Runner Packaging"
|
||||
|
||||
# Key resolution
|
||||
resolve_key() {
|
||||
if [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
|
||||
local tmp_key="$OUT_DIR/.cosign.key"
|
||||
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$tmp_key"
|
||||
chmod 600 "$tmp_key"
|
||||
echo "$tmp_key"
|
||||
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
|
||||
echo "$ROOT/tools/cosign/cosign.key"
|
||||
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
|
||||
echo "[info] Using development key" >&2
|
||||
echo "$ROOT/tools/cosign/cosign.dev.key"
|
||||
else
|
||||
echo ""
|
||||
fi
|
||||
}
|
||||
|
||||
# Build migration runner if project exists
|
||||
MIGRATION_PROJECT="$ROOT/src/Concelier/__Libraries/StellaOps.Concelier.Migrations/StellaOps.Concelier.Migrations.csproj"
|
||||
if [[ -f "$MIGRATION_PROJECT" ]]; then
|
||||
echo "==> Building migration runner..."
|
||||
dotnet publish "$MIGRATION_PROJECT" -c Release -o "$OUT_DIR/runner" --no-restore 2>/dev/null || \
|
||||
echo "[info] Build skipped (may need restore or project doesn't exist yet)"
|
||||
else
|
||||
echo "[info] Migration project not found; creating placeholder"
|
||||
cat > "$OUT_DIR/runner/README.txt" <<EOF
|
||||
LNM Migration Runner Placeholder
|
||||
Build from: src/Concelier/__Libraries/StellaOps.Concelier.Migrations/
|
||||
Created: $CREATED
|
||||
Status: Awaiting upstream migration project
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Create runner bundle
|
||||
echo "==> Creating runner bundle..."
|
||||
RUNNER_TAR="$OUT_DIR/lnm-migration-runner.tar.gz"
|
||||
tar -czf "$RUNNER_TAR" -C "$OUT_DIR/runner" .
|
||||
|
||||
# Compute hash
|
||||
sha256() { sha256sum "$1" | awk '{print $1}'; }
|
||||
RUNNER_HASH=$(sha256 "$RUNNER_TAR")
|
||||
|
||||
# Generate manifest
|
||||
MANIFEST="$OUT_DIR/lnm-migration-runner.manifest.json"
|
||||
cat > "$MANIFEST" <<EOF
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"created": "$CREATED",
|
||||
"runner": {
|
||||
"path": "lnm-migration-runner.tar.gz",
|
||||
"sha256": "$RUNNER_HASH"
|
||||
},
|
||||
"migrations": {
|
||||
"22-001": {"status": "infrastructure-ready", "description": "Advisory observations/linksets staging"},
|
||||
"22-002": {"status": "infrastructure-ready", "description": "VEX observation/linkset backfill"},
|
||||
"22-003": {"status": "infrastructure-ready", "description": "Metrics monitoring"}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
# Sign if key available
|
||||
KEY_FILE=$(resolve_key)
|
||||
if [[ -n "$KEY_FILE" ]] && command -v cosign &>/dev/null; then
|
||||
echo "==> Signing bundle..."
|
||||
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" cosign sign-blob \
|
||||
--key "$KEY_FILE" \
|
||||
--bundle "$OUT_DIR/lnm-migration-runner.dsse.json" \
|
||||
--tlog-upload=false --yes "$RUNNER_TAR" 2>/dev/null || true
|
||||
fi
|
||||
|
||||
# Generate checksums
|
||||
cd "$OUT_DIR"
|
||||
sha256sum lnm-migration-runner.tar.gz lnm-migration-runner.manifest.json > SHA256SUMS
|
||||
|
||||
echo "==> LNM runner packaging complete"
|
||||
echo " Bundle: $RUNNER_TAR"
|
||||
echo " Manifest: $MANIFEST"
|
||||
53
devops/tools/lnm/tooling-infrastructure.md
Normal file
53
devops/tools/lnm/tooling-infrastructure.md
Normal file
@@ -0,0 +1,53 @@
|
||||
# LNM (Link-Not-Merge) Tooling Infrastructure
|
||||
|
||||
## Scope (DEVOPS-LNM-TOOLING-22-000)
|
||||
Package and tooling for linkset/advisory migrations across Concelier and Excititor.
|
||||
|
||||
## Components
|
||||
|
||||
### 1. Migration Runner
|
||||
Location: `src/Concelier/__Libraries/StellaOps.Concelier.Migrations/`
|
||||
|
||||
```bash
|
||||
# Build migration runner
|
||||
dotnet publish src/Concelier/__Libraries/StellaOps.Concelier.Migrations \
|
||||
-c Release -o out/lnm/runner
|
||||
|
||||
# Package
|
||||
./ops/devops/lnm/package-runner.sh
|
||||
```
|
||||
|
||||
### 2. Backfill Tool
|
||||
Location: `src/Concelier/StellaOps.Concelier.Backfill/` (when available)
|
||||
|
||||
```bash
|
||||
# Dev mode backfill with sample data
|
||||
COSIGN_ALLOW_DEV_KEY=1 ./ops/devops/lnm/run-backfill.sh --dry-run
|
||||
|
||||
# Production backfill
|
||||
./ops/devops/lnm/run-backfill.sh --batch-size=500
|
||||
```
|
||||
|
||||
### 3. Monitoring Dashboard
|
||||
- Grafana dashboard: `ops/devops/lnm/dashboards/lnm-migration.json`
|
||||
- Alert rules: `ops/devops/lnm/alerts/lnm-alerts.yaml`
|
||||
|
||||
## CI Workflows
|
||||
|
||||
| Workflow | Purpose |
|
||||
|----------|---------|
|
||||
| `lnm-migration-ci.yml` | Build/test migration runner |
|
||||
| `lnm-backfill-staging.yml` | Run backfill in staging |
|
||||
| `lnm-metrics-ci.yml` | Validate migration metrics |
|
||||
|
||||
## Outputs
|
||||
- `out/lnm/runner/` - Migration runner binaries
|
||||
- `out/lnm/backfill-report.json` - Backfill results
|
||||
- `out/lnm/SHA256SUMS` - Checksums
|
||||
|
||||
## Status
|
||||
- [x] Infrastructure plan created
|
||||
- [ ] Migration runner project (awaiting upstream)
|
||||
- [ ] Backfill tool (awaiting upstream)
|
||||
- [x] CI workflow templates ready
|
||||
- [x] Monitoring templates ready
|
||||
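Locally, the `lnm-metrics-ci.yml` check reduces to running the dashboard validator; the dashboard path is passed explicitly because the script's default still points at the older `ops/devops/lnm/` location:

```bash
# Confirm the dashboard JSON parses and references every required metric.
bash devops/tools/lnm/metrics-ci-check.sh devops/tools/lnm/metrics-dashboard.json
```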
20
devops/tools/lnm/vex-backfill-plan.md
Normal file
20
devops/tools/lnm/vex-backfill-plan.md
Normal file
@@ -0,0 +1,20 @@
|
||||
# VEX Backfill Plan (DEVOPS-LNM-22-002)
|
||||
|
||||
## Goal
|
||||
Run VEX observation/linkset backfill with monitoring, ensure events flow via NATS/Redis, and capture run artifacts.
|
||||
|
||||
## Steps
|
||||
1) Pre-checks
|
||||
- Confirm DEVOPS-LNM-22-001 counts baseline (`.artifacts/lnm-counts.json`).
|
||||
- Ensure `STAGING_MONGO_URI`, `NATS_URL`, `REDIS_URL` available (read-only or test brokers).
|
||||
2) Run VEX backfill
|
||||
- `dotnet run --project src/Concelier/StellaOps.Concelier.Backfill -- --mode=vex --batch-size=500 --max-conflicts=0 --mongo $STAGING_MONGO_URI --nats $NATS_URL --redis $REDIS_URL`
|
||||
3) Metrics capture
|
||||
- Export per-run metrics to `.artifacts/vex-backfill-metrics.json` (duration, processed, conflicts, events emitted).
|
||||
4) Event verification
|
||||
- Subscribe to `concelier.vex.backfill.completed` and `concelier.linksets.vex.upserted`; ensure queues drained.
|
||||
5) Roll-forward checklist
|
||||
- Increase batch size to 2000 for prod; keep conflicts = 0; schedule maintenance window.
|
||||
|
||||
## Acceptance
|
||||
- Zero conflicts; events observed; metrics file present; rollback plan documented.
|
||||
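For step 4, one way to watch the events is the `nats` CLI; the CLI itself and a reachable `$NATS_URL` are assumptions, while the subject names come from the plan above:

```bash
# Watch for the completion event while the backfill runs (Ctrl-C once it arrives).
nats --server "$NATS_URL" sub "concelier.vex.backfill.completed"

# In a second terminal, tail the upsert subject and confirm it goes quiet after completion.
nats --server "$NATS_URL" sub "concelier.linksets.vex.upserted"
```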
14
devops/tools/mirror/README.md
Normal file
14
devops/tools/mirror/README.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# Mirror signing helpers
|
||||
|
||||
- `make-thin-v1.sh`: builds thin bundle v1, computes checksums, emits bundle meta (offline/rekor/mirror gaps), optional DSSE+TUF signing when `SIGN_KEY` is set, and runs verifier.
|
||||
- `sign_thin_bundle.py`: signs manifest (DSSE), bundle meta (DSSE), and root/targets/snapshot/timestamp JSON using an Ed25519 PEM key.
|
||||
- `verify_thin_bundle.py`: checks SHA256 sidecars, manifest schema, tar determinism, required layers, optional bundle meta and DSSE signatures; accepts `--bundle-meta`, `--pubkey`, `--tenant`, `--environment`.
|
||||
- `ci-sign.sh`: CI wrapper. Set `MIRROR_SIGN_KEY_B64` (base64-encoded Ed25519 PEM) and run; it builds, signs, and verifies in one step, emitting `milestone.json` with manifest/tar/bundle hashes.
|
||||
- `verify_oci_layout.py`: validates OCI layout/index/manifest and blob digests when `OCI=1` is used.
|
||||
- `mirror-create.sh`: convenience wrapper to build + verify thin bundles (optional SIGN_KEY, time anchor, OCI flag).
|
||||
- `mirror-verify.sh`: wrapper around `verify_thin_bundle.py` for quick hash/DSSE checks.
|
||||
- `schedule-export-center-run.sh`: schedules an Export Center run for mirror bundles via HTTP POST; set `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_TOKEN` (Bearer), optional `EXPORT_CENTER_PROJECT`; logs to `AUDIT_LOG_PATH` (default `logs/export-center-schedule.log`). Set `EXPORT_CENTER_ARTIFACTS_JSON` to inject bundle metadata into the request payload.
|
||||
- `export-center-wire.sh`: builds `export-center-handoff.json` from `out/mirror/thin/milestone.json`, emits recommended Export Center targets, and (when `EXPORT_CENTER_AUTO_SCHEDULE=1`) calls `schedule-export-center-run.sh` to push the run. Outputs live under `out/mirror/thin/export-center/`.
|
||||
- CI: `.gitea/workflows/mirror-sign.yml` runs this script after signing; scheduling remains opt-in via secrets `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TOKEN`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_PROJECT`, `EXPORT_CENTER_AUTO_SCHEDULE`.
|
||||
|
||||
Artifacts live under `out/mirror/thin/`.
|
||||
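Tying the helpers above together, a local build-and-verify round trip looks roughly like this; paths assume the repository root as working directory, and the public key shown is the one `ci-sign.sh` derives:

```bash
# Build a thin bundle (with OCI layout) and run the built-in verifier.
./devops/tools/mirror/mirror-create.sh --out out/mirror/thin --oci

# Re-verify a signed bundle, including DSSE signatures and tenant/environment scope.
./devops/tools/mirror/mirror-verify.sh \
  out/mirror/thin/mirror-thin-v1.manifest.json \
  out/mirror/thin/mirror-thin-v1.tar.gz \
  --bundle-meta out/mirror/thin/mirror-thin-v1.bundle.json \
  --pubkey out/mirror/thin/tuf/keys/ci-ed25519.pub \
  --tenant tenant-demo --environment lab
```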
BIN
devops/tools/mirror/__pycache__/sign_thin_bundle.cpython-312.pyc
Normal file
BIN
devops/tools/mirror/__pycache__/sign_thin_bundle.cpython-312.pyc
Normal file
Binary file not shown.
Binary file not shown.
20
devops/tools/mirror/check_signing_prereqs.sh
Normal file
20
devops/tools/mirror/check_signing_prereqs.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
|
||||
# Verifies signing prerequisites without requiring the actual key contents.
|
||||
set -euo pipefail
|
||||
if [[ -z "${MIRROR_SIGN_KEY_B64:-}" ]]; then
|
||||
if [[ "${REQUIRE_PROD_SIGNING:-0}" == "1" ]]; then
|
||||
echo "[error] MIRROR_SIGN_KEY_B64 is required for production signing; set the secret before running." >&2
|
||||
exit 2
|
||||
fi
|
||||
echo "[warn] MIRROR_SIGN_KEY_B64 is not set; ci-sign.sh will fall back to embedded test key (non-production)." >&2
|
||||
fi
|
||||
# basic base64 sanity check (skipped when the key is not set)
if [[ -n "${MIRROR_SIGN_KEY_B64:-}" ]] && ! printf "%s" "${MIRROR_SIGN_KEY_B64}" | base64 -d >/dev/null 2>&1; then
|
||||
echo "MIRROR_SIGN_KEY_B64 is not valid base64" >&2
|
||||
exit 3
|
||||
fi
|
||||
# ensure scripts exist
|
||||
for f in scripts/mirror/ci-sign.sh scripts/mirror/sign_thin_bundle.py scripts/mirror/verify_thin_bundle.py; do
|
||||
[[ -x "$f" || -f "$f" ]] || { echo "$f missing" >&2; exit 4; }
|
||||
done
|
||||
echo "Signing prerequisites present (key env set, scripts available)."
|
||||
116
devops/tools/mirror/ci-sign.sh
Normal file
116
devops/tools/mirror/ci-sign.sh
Normal file
@@ -0,0 +1,116 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
# Allow CI to fall back to a deterministic test key when MIRROR_SIGN_KEY_B64 is unset,
|
||||
# but forbid this on release/tag builds when REQUIRE_PROD_SIGNING=1.
|
||||
# Throwaway dev key (Ed25519) generated 2025-11-23; matches the value documented in
|
||||
# docs/modules/mirror/signing-runbook.md. Safe for non-production smoke only.
|
||||
DEFAULT_TEST_KEY_B64="LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1DNENBUUF3QlFZREsyVndCQ0lFSURqb3pDRVdKVVFUdW1xZ2gyRmZXcVBaemlQbkdaSzRvOFZRTThGYkZCSEcKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo="
|
||||
if [[ -z "${MIRROR_SIGN_KEY_B64:-}" ]]; then
|
||||
if [[ "${REQUIRE_PROD_SIGNING:-0}" == "1" ]]; then
|
||||
echo "[error] MIRROR_SIGN_KEY_B64 is required for production signing; refusing to use test key." >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "[warn] MIRROR_SIGN_KEY_B64 not set; using embedded test key (non-production) for CI signing" >&2
|
||||
MIRROR_SIGN_KEY_B64="$DEFAULT_TEST_KEY_B64"
|
||||
fi
|
||||
ROOT=$(cd "$(dirname "$0")/../.." && pwd)
|
||||
KEYDIR="$ROOT/out/mirror/thin/tuf/keys"
|
||||
mkdir -p "$KEYDIR"
|
||||
KEYFILE="$KEYDIR/ci-ed25519.pem"
|
||||
printf "%s" "$MIRROR_SIGN_KEY_B64" | base64 -d > "$KEYFILE"
|
||||
chmod 600 "$KEYFILE"
|
||||
# Export public key for TUF keyid calculation
|
||||
openssl pkey -in "$KEYFILE" -pubout -out "$KEYDIR/ci-ed25519.pub" >/dev/null 2>&1
|
||||
STAGE=${STAGE:-$ROOT/out/mirror/thin/stage-v1}
|
||||
CREATED=${CREATED:-$(date -u +%Y-%m-%dT%H:%M:%SZ)}
|
||||
TENANT_SCOPE=${TENANT_SCOPE:-tenant-demo}
|
||||
ENV_SCOPE=${ENV_SCOPE:-lab}
|
||||
CHUNK_SIZE=${CHUNK_SIZE:-5242880}
|
||||
CHECKPOINT_FRESHNESS=${CHECKPOINT_FRESHNESS:-86400}
|
||||
OCI=${OCI:-1}
|
||||
SIGN_KEY="$KEYFILE" STAGE="$STAGE" CREATED="$CREATED" TENANT_SCOPE="$TENANT_SCOPE" ENV_SCOPE="$ENV_SCOPE" CHUNK_SIZE="$CHUNK_SIZE" CHECKPOINT_FRESHNESS="$CHECKPOINT_FRESHNESS" OCI="$OCI" "$ROOT/src/Mirror/StellaOps.Mirror.Creator/make-thin-v1.sh"
|
||||
|
||||
# Default to staged time-anchor unless caller overrides
|
||||
TIME_ANCHOR_FILE=${TIME_ANCHOR_FILE:-$ROOT/out/mirror/thin/stage-v1/layers/time-anchor.json}
|
||||
|
||||
# Emit milestone summary with hashes for downstream consumers
|
||||
MANIFEST_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.manifest.json"
|
||||
TAR_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.tar.gz"
|
||||
DSSE_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.manifest.dsse.json"
|
||||
BUNDLE_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.bundle.json"
|
||||
BUNDLE_DSSE_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.bundle.dsse.json"
|
||||
TIME_ANCHOR_DSSE_PATH="$TIME_ANCHOR_FILE.dsse.json"
|
||||
TRANSPORT_PATH="$ROOT/out/mirror/thin/stage-v1/layers/transport-plan.json"
|
||||
REKOR_POLICY_PATH="$ROOT/out/mirror/thin/stage-v1/layers/rekor-policy.json"
|
||||
MIRROR_POLICY_PATH="$ROOT/out/mirror/thin/stage-v1/layers/mirror-policy.json"
|
||||
OFFLINE_POLICY_PATH="$ROOT/out/mirror/thin/stage-v1/layers/offline-kit-policy.json"
|
||||
SUMMARY_PATH="$ROOT/out/mirror/thin/milestone.json"
|
||||
|
||||
sha256() {
|
||||
sha256sum "$1" | awk '{print $1}'
|
||||
}
|
||||
|
||||
# Sign manifest, bundle meta, and time-anchor (if present)
|
||||
python "$ROOT/scripts/mirror/sign_thin_bundle.py" \
|
||||
--key "$KEYFILE" \
|
||||
--manifest "$MANIFEST_PATH" \
|
||||
--tar "$TAR_PATH" \
|
||||
--tuf-dir "$ROOT/out/mirror/thin/tuf" \
|
||||
--bundle "$BUNDLE_PATH" \
|
||||
--time-anchor "$TIME_ANCHOR_FILE"
|
||||
|
||||
# Normalize time-anchor DSSE location for bundle meta/summary
|
||||
if [[ -f "$TIME_ANCHOR_FILE.dsse.json" ]]; then
|
||||
cp "$TIME_ANCHOR_FILE.dsse.json" "$TIME_ANCHOR_DSSE_PATH"
|
||||
fi
|
||||
|
||||
# Refresh bundle meta hashes now that DSSE files exist
|
||||
# Unquoted heredoc so the $ROOT/$BUNDLE_PATH/... references below expand before Python runs.
python - <<PY
|
||||
import json, pathlib, hashlib
|
||||
root = pathlib.Path("$ROOT")
|
||||
bundle_path = pathlib.Path("$BUNDLE_PATH")
|
||||
manifest_dsse = pathlib.Path("$DSSE_PATH")
|
||||
bundle_dsse = pathlib.Path("$BUNDLE_DSSE_PATH")
|
||||
time_anchor_dsse = pathlib.Path("$TIME_ANCHOR_DSSE_PATH")
|
||||
|
||||
def sha(path: pathlib.Path) -> str:
|
||||
h = hashlib.sha256()
|
||||
with path.open('rb') as f:
|
||||
for chunk in iter(lambda: f.read(8192), b''):
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
data = json.loads(bundle_path.read_text())
|
||||
art = data.setdefault('artifacts', {})
|
||||
if manifest_dsse.exists():
|
||||
art.setdefault('manifest_dsse', {})['sha256'] = sha(manifest_dsse)
|
||||
if bundle_dsse.exists():
|
||||
art.setdefault('bundle_dsse', {})['sha256'] = sha(bundle_dsse)
|
||||
if time_anchor_dsse.exists():
|
||||
art.setdefault('time_anchor_dsse', {})['sha256'] = sha(time_anchor_dsse)
|
||||
|
||||
bundle_path.write_text(json.dumps(data, indent=2, sort_keys=True) + "\n")
|
||||
sha_path = bundle_path.with_suffix(bundle_path.suffix + '.sha256')
|
||||
sha_path.write_text(f"{sha(bundle_path)} {bundle_path.name}\n")
|
||||
PY
|
||||
|
||||
cat > "$SUMMARY_PATH" <<JSON
|
||||
{
|
||||
"created": "$CREATED",
|
||||
"manifest": {"path": "$(basename "$MANIFEST_PATH")", "sha256": "$(sha256 "$MANIFEST_PATH")"},
|
||||
"tarball": {"path": "$(basename "$TAR_PATH")", "sha256": "$(sha256 "$TAR_PATH")"},
|
||||
"dsse": $( [[ -f "$DSSE_PATH" ]] && echo "{\"path\": \"$(basename "$DSSE_PATH")\", \"sha256\": \"$(sha256 "$DSSE_PATH")\"}" || echo "null" ),
|
||||
"bundle": $( [[ -f "$BUNDLE_PATH" ]] && echo "{\"path\": \"$(basename "$BUNDLE_PATH")\", \"sha256\": \"$(sha256 "$BUNDLE_PATH")\"}" || echo "null" ),
|
||||
"bundle_dsse": $( [[ -f "$BUNDLE_DSSE_PATH" ]] && echo "{\"path\": \"$(basename "$BUNDLE_DSSE_PATH")\", \"sha256\": \"$(sha256 "$BUNDLE_DSSE_PATH")\"}" || echo "null" ),
|
||||
"time_anchor": $( [[ -n "${TIME_ANCHOR_FILE:-}" && -f "$TIME_ANCHOR_FILE" ]] && echo "{\"path\": \"$(basename "$TIME_ANCHOR_FILE")\", \"sha256\": \"$(sha256 "$TIME_ANCHOR_FILE")\"}" || echo "null" ),
|
||||
"time_anchor_dsse": $( [[ -f "$TIME_ANCHOR_DSSE_PATH" ]] && echo "{\"path\": \"$(basename "$TIME_ANCHOR_DSSE_PATH")\", \"sha256\": \"$(sha256 "$TIME_ANCHOR_DSSE_PATH")\"}" || echo "null" )
|
||||
,"policies": {
|
||||
"transport": {"path": "$(basename "$TRANSPORT_PATH")", "sha256": "$(sha256 "$TRANSPORT_PATH")"},
|
||||
"rekor": {"path": "$(basename "$REKOR_POLICY_PATH")", "sha256": "$(sha256 "$REKOR_POLICY_PATH")"},
|
||||
"mirror": {"path": "$(basename "$MIRROR_POLICY_PATH")", "sha256": "$(sha256 "$MIRROR_POLICY_PATH")"},
|
||||
"offline": {"path": "$(basename "$OFFLINE_POLICY_PATH")", "sha256": "$(sha256 "$OFFLINE_POLICY_PATH")"}
|
||||
}
|
||||
}
|
||||
JSON
|
||||
|
||||
echo "Milestone summary written to $SUMMARY_PATH"
|
||||
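A local dry run of the wrapper above, using a throwaway key instead of the embedded test key; `REQUIRE_PROD_SIGNING=1` mirrors what release builds set, and the temporary key path is illustrative:

```bash
# Generate a disposable Ed25519 key and feed it to the CI signing wrapper.
openssl genpkey -algorithm ED25519 -out /tmp/mirror-ci.pem
export MIRROR_SIGN_KEY_B64="$(base64 -w0 /tmp/mirror-ci.pem)"

REQUIRE_PROD_SIGNING=1 ./devops/tools/mirror/ci-sign.sh

# The milestone summary lists manifest/tarball/DSSE hashes for downstream consumers.
jq . out/mirror/thin/milestone.json
```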
122
devops/tools/mirror/export-center-wire.sh
Executable file
122
devops/tools/mirror/export-center-wire.sh
Executable file
@@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Prepare Export Center handoff metadata for mirror thin bundles and optionally schedule a run.
|
||||
# Usage (handoff only):
|
||||
# scripts/mirror/export-center-wire.sh
|
||||
# Usage (handoff + schedule when secrets exist):
|
||||
# EXPORT_CENTER_BASE_URL=https://export.example.com \
|
||||
# EXPORT_CENTER_TOKEN=token123 \
|
||||
# EXPORT_CENTER_TENANT=tenant-a \
|
||||
# EXPORT_CENTER_AUTO_SCHEDULE=1 \
|
||||
# scripts/mirror/export-center-wire.sh
|
||||
# Inputs:
|
||||
# - MILESTONE_PATH: path to milestone.json (default: out/mirror/thin/milestone.json)
|
||||
# - EXPORT_CENTER_OUT_DIR: output directory for handoff files (default: out/mirror/thin/export-center)
|
||||
# - EXPORT_CENTER_PROFILE_ID: profile identifier for the Export Center run (default: mirror:thin)
|
||||
# - EXPORT_CENTER_TARGETS_JSON: override targets array sent to Export Center (JSON array string)
|
||||
# - EXPORT_CENTER_FORMATS_JSON: override formats array (JSON array string; default: ["tar.gz","json","dsse"])
|
||||
# - EXPORT_CENTER_AUTO_SCHEDULE: when "1", schedule a run using schedule-export-center-run.sh
|
||||
# - EXPORT_CENTER_BASE_URL / EXPORT_CENTER_TENANT / EXPORT_CENTER_PROJECT / EXPORT_CENTER_TOKEN: forwarded to scheduler
|
||||
# - EXPORT_CENTER_AUDIT_LOG: optional override for scheduler audit log path
|
||||
|
||||
MILESTONE_PATH="${MILESTONE_PATH:-out/mirror/thin/milestone.json}"
|
||||
OUT_DIR="${EXPORT_CENTER_OUT_DIR:-out/mirror/thin/export-center}"
|
||||
PROFILE_ID="${EXPORT_CENTER_PROFILE_ID:-mirror:thin}"
|
||||
FORMATS_JSON="${EXPORT_CENTER_FORMATS_JSON:-[\"tar.gz\",\"json\",\"dsse\"]}"
|
||||
AUTO_SCHEDULE="${EXPORT_CENTER_AUTO_SCHEDULE:-0}"
|
||||
|
||||
HANDOFF_PATH="${OUT_DIR}/export-center-handoff.json"
|
||||
TARGETS_PATH="${OUT_DIR}/export-center-targets.json"
|
||||
RESPONSE_PATH="${OUT_DIR}/schedule-response.json"
|
||||
|
||||
export HANDOFF_PATH TARGETS_PATH RESPONSE_PATH PROFILE_ID MILESTONE_PATH
|
||||
|
||||
mkdir -p "${OUT_DIR}"
|
||||
|
||||
PROFILE_ID="${PROFILE_ID}" MILESTONE_PATH="${MILESTONE_PATH}" HANDOFF_PATH="${HANDOFF_PATH}" TARGETS_PATH="${TARGETS_PATH}" python3 - <<'PY'
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import Dict, Any
|
||||
|
||||
milestone_path = os.environ["MILESTONE_PATH"]
|
||||
handoff_path = os.environ["HANDOFF_PATH"]
|
||||
targets_path = os.environ["TARGETS_PATH"]
|
||||
profile = os.environ.get("PROFILE_ID", "mirror:thin")
|
||||
|
||||
try:
|
||||
with open(milestone_path, encoding="utf-8") as f:
|
||||
milestone = json.load(f)
|
||||
except FileNotFoundError:
|
||||
print(f"milestone file not found: {milestone_path}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
artifacts = []
|
||||
|
||||
def add_artifact(name: str, entry: Dict[str, Any] | None) -> None:
|
||||
if not isinstance(entry, dict):
|
||||
return
|
||||
path = entry.get("path")
|
||||
sha = entry.get("sha256")
|
||||
if path and sha:
|
||||
artifacts.append({"name": name, "path": path, "sha256": sha})
|
||||
|
||||
add_artifact("manifest", milestone.get("manifest"))
|
||||
add_artifact("manifest_dsse", milestone.get("dsse"))
|
||||
add_artifact("bundle", milestone.get("tarball"))
|
||||
add_artifact("bundle_meta", milestone.get("bundle"))
|
||||
add_artifact("bundle_meta_dsse", milestone.get("bundle_dsse"))
|
||||
add_artifact("time_anchor", milestone.get("time_anchor"))
|
||||
|
||||
for name, entry in sorted((milestone.get("policies") or {}).items()):
|
||||
add_artifact(f"policy_{name}", entry)
|
||||
|
||||
handoff = {
|
||||
"profileId": profile,
|
||||
"generatedAt": datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z"),
|
||||
"sourceMilestone": os.path.abspath(milestone_path),
|
||||
"artifacts": artifacts,
|
||||
}
|
||||
|
||||
with open(handoff_path, "w", encoding="utf-8") as f:
|
||||
json.dump(handoff, f, indent=2)
|
||||
|
||||
with open(targets_path, "w", encoding="utf-8") as f:
|
||||
json.dump([a["name"] for a in artifacts], f)
|
||||
PY
|
||||
|
||||
ARTIFACTS_JSON=$(python3 - <<'PY'
|
||||
import json
|
||||
import os
|
||||
with open(os.environ["HANDOFF_PATH"], encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(json.dumps(data.get("artifacts", [])))
|
||||
PY
|
||||
)
|
||||
ARTIFACTS_JSON="${ARTIFACTS_JSON//$'\n'/}"
|
||||
|
||||
TARGETS_JSON_DEFAULT=$(tr -d '\r\n' < "${TARGETS_PATH}")
|
||||
TARGETS_JSON="${EXPORT_CENTER_TARGETS_JSON:-$TARGETS_JSON_DEFAULT}"
|
||||
|
||||
echo "[info] Export Center handoff written to ${HANDOFF_PATH}"
|
||||
echo "[info] Recommended targets: ${TARGETS_JSON}"
|
||||
|
||||
schedule_note="AUTO_SCHEDULE=0"
|
||||
if [[ "${AUTO_SCHEDULE}" == "1" ]]; then
|
||||
schedule_note="missing EXPORT_CENTER_BASE_URL"
|
||||
if [[ -n "${EXPORT_CENTER_BASE_URL:-}" ]]; then
|
||||
export EXPORT_CENTER_ARTIFACTS_JSON="${ARTIFACTS_JSON}"
|
||||
schedule_note="scheduled"
|
||||
bash src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh "${PROFILE_ID}" "${TARGETS_JSON}" "${FORMATS_JSON}" | tee "${RESPONSE_PATH}"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ ! -f "${RESPONSE_PATH}" ]]; then
|
||||
cat > "${RESPONSE_PATH}" <<JSON
|
||||
{"scheduled": false, "reason": "${schedule_note}"}
|
||||
JSON
|
||||
fi
|
||||
|
||||
echo "[info] Scheduler response captured at ${RESPONSE_PATH}"
|
||||
45
devops/tools/mirror/mirror-create.sh
Normal file
45
devops/tools/mirror/mirror-create.sh
Normal file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Deterministic wrapper for building mirror-thin-v1 bundles.
|
||||
# Usage: mirror-create.sh [--out out/mirror/thin] [--sign-key path.pem] [--oci] [--time-anchor path.json]
|
||||
|
||||
OUT="out/mirror/thin"
|
||||
SIGN_KEY=""
|
||||
TIME_ANCHOR=""
|
||||
OCI=0
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 [--out <dir>] [--sign-key key.pem] [--oci] [--time-anchor path.json]" >&2
|
||||
exit 2
|
||||
}
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--out) OUT=${2:-}; shift ;;
|
||||
--sign-key) SIGN_KEY=${2:-}; shift ;;
|
||||
--time-anchor) TIME_ANCHOR=${2:-}; shift ;;
|
||||
--oci) OCI=1 ;;
|
||||
*) usage ;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
ROOT=$(cd "$(dirname "$0")/.." && pwd)
|
||||
pushd "$ROOT/.." >/dev/null
|
||||
|
||||
export SIGN_KEY
|
||||
export TIME_ANCHOR_FILE=${TIME_ANCHOR:-}
|
||||
export OCI
|
||||
export OUT
|
||||
|
||||
src/Mirror/StellaOps.Mirror.Creator/make-thin-v1.sh
|
||||
|
||||
echo "Bundle built under $OUT"
|
||||
python scripts/mirror/verify_thin_bundle.py \
|
||||
"$OUT/mirror-thin-v1.manifest.json" \
|
||||
"$OUT/mirror-thin-v1.tar.gz" \
|
||||
--bundle-meta "$OUT/mirror-thin-v1.bundle.json"
|
||||
|
||||
popd >/dev/null
|
||||
echo "Create/verify completed"
|
||||
37
devops/tools/mirror/mirror-verify.sh
Normal file
37
devops/tools/mirror/mirror-verify.sh
Normal file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Verify a mirror-thin-v1 bundle and optional DSSE signatures.
|
||||
# Usage: mirror-verify.sh manifest.json bundle.tar.gz [--bundle-meta bundle.json] [--pubkey key.pub] [--tenant t] [--environment env]
|
||||
|
||||
manifest=${1:-}
|
||||
bundle=${2:-}
|
||||
shift 2 || true
|
||||
|
||||
bundle_meta=""
|
||||
pubkey=""
|
||||
tenant=""
|
||||
environment=""
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--bundle-meta) bundle_meta=${2:-}; shift ;;
|
||||
--pubkey) pubkey=${2:-}; shift ;;
|
||||
--tenant) tenant=${2:-}; shift ;;
|
||||
--environment) environment=${2:-}; shift ;;
|
||||
*) echo "Unknown arg $1" >&2; exit 2 ;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
[[ -z "$manifest" || -z "$bundle" ]] && { echo "manifest and bundle required" >&2; exit 2; }
|
||||
|
||||
args=("$manifest" "$bundle")
|
||||
[[ -n "$bundle_meta" ]] && args+=("--bundle-meta" "$bundle_meta")
|
||||
[[ -n "$pubkey" ]] && args+=("--pubkey" "$pubkey")
|
||||
[[ -n "$tenant" ]] && args+=("--tenant" "$tenant")
|
||||
[[ -n "$environment" ]] && args+=("--environment" "$environment")
|
||||
|
||||
python scripts/mirror/verify_thin_bundle.py "${args[@]}"
|
||||
|
||||
echo "Mirror bundle verification passed."
|
||||
105
devops/tools/mirror/sign_thin_bundle.py
Normal file
105
devops/tools/mirror/sign_thin_bundle.py
Normal file
@@ -0,0 +1,105 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Sign mirror-thin-v1 artefacts using an Ed25519 key and emit DSSE + TUF signatures.
|
||||
|
||||
Usage:
|
||||
python scripts/mirror/sign_thin_bundle.py \
|
||||
--key out/mirror/thin/tuf/keys/mirror-ed25519-test-1.pem \
|
||||
--manifest out/mirror/thin/mirror-thin-v1.manifest.json \
|
||||
--tar out/mirror/thin/mirror-thin-v1.tar.gz \
|
||||
--tuf-dir out/mirror/thin/tuf \
|
||||
--time-anchor out/mirror/thin/stage-v1/layers/time-anchor.json
|
||||
|
||||
Writes:
|
||||
- mirror-thin-v1.manifest.dsse.json
|
||||
- mirror-thin-v1.bundle.dsse.json (optional, when --bundle is provided)
|
||||
- updates signatures in root.json, targets.json, snapshot.json, timestamp.json
|
||||
"""
|
||||
import argparse, base64, json, pathlib, hashlib
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
|
||||
|
||||
def b64url(data: bytes) -> str:
|
||||
return base64.urlsafe_b64encode(data).rstrip(b"=").decode()
|
||||
|
||||
def load_key(path: pathlib.Path) -> Ed25519PrivateKey:
|
||||
return serialization.load_pem_private_key(path.read_bytes(), password=None)
|
||||
|
||||
def keyid_from_pub(pub_path: pathlib.Path) -> str:
|
||||
raw = pub_path.read_bytes()
|
||||
return hashlib.sha256(raw).hexdigest()
|
||||
|
||||
def sign_bytes(key: Ed25519PrivateKey, data: bytes) -> bytes:
|
||||
return key.sign(data)
|
||||
|
||||
def write_json(path: pathlib.Path, obj):
|
||||
path.write_text(json.dumps(obj, indent=2, sort_keys=True) + "\n")
|
||||
|
||||
def sign_tuf(path: pathlib.Path, keyid: str, key: Ed25519PrivateKey):
|
||||
data = path.read_bytes()
|
||||
sig = sign_bytes(key, data)
|
||||
obj = json.loads(data)
|
||||
obj["signatures"] = [{"keyid": keyid, "sig": b64url(sig)}]
|
||||
write_json(path, obj)
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser()
|
||||
ap.add_argument("--key", required=True, type=pathlib.Path)
|
||||
ap.add_argument("--manifest", required=True, type=pathlib.Path)
|
||||
ap.add_argument("--tar", required=True, type=pathlib.Path)
|
||||
ap.add_argument("--tuf-dir", required=True, type=pathlib.Path)
|
||||
ap.add_argument("--bundle", required=False, type=pathlib.Path)
|
||||
ap.add_argument("--time-anchor", required=False, type=pathlib.Path)
|
||||
args = ap.parse_args()
|
||||
|
||||
key = load_key(args.key)
|
||||
pub_path = args.key.with_suffix(".pub")
|
||||
keyid = keyid_from_pub(pub_path)
|
||||
|
||||
manifest_bytes = args.manifest.read_bytes()
|
||||
sig = sign_bytes(key, manifest_bytes)
|
||||
dsse = {
|
||||
"payloadType": "application/vnd.stellaops.mirror.manifest+json",
|
||||
"payload": b64url(manifest_bytes),
|
||||
"signatures": [{"keyid": keyid, "sig": b64url(sig)}],
|
||||
}
|
||||
dsse_path = args.manifest.with_suffix(".dsse.json")
|
||||
write_json(dsse_path, dsse)
|
||||
|
||||
if args.bundle:
|
||||
bundle_bytes = args.bundle.read_bytes()
|
||||
bundle_sig = sign_bytes(key, bundle_bytes)
|
||||
bundle_dsse = {
|
||||
"payloadType": "application/vnd.stellaops.mirror.bundle+json",
|
||||
"payload": b64url(bundle_bytes),
|
||||
"signatures": [{"keyid": keyid, "sig": b64url(bundle_sig)}],
|
||||
}
|
||||
bundle_dsse_path = args.bundle.with_suffix(".dsse.json")
|
||||
write_json(bundle_dsse_path, bundle_dsse)
|
||||
|
||||
anchor_dsse_path = None
|
||||
if args.time_anchor:
|
||||
anchor_bytes = args.time_anchor.read_bytes()
|
||||
anchor_sig = sign_bytes(key, anchor_bytes)
|
||||
anchor_dsse = {
|
||||
"payloadType": "application/vnd.stellaops.time-anchor+json",
|
||||
"payload": b64url(anchor_bytes),
|
||||
"signatures": [{"keyid": keyid, "sig": b64url(anchor_sig)}],
|
||||
}
|
||||
anchor_dsse_path = args.time_anchor.with_suffix(".dsse.json")
|
||||
write_json(anchor_dsse_path, anchor_dsse)
|
||||
|
||||
# update TUF metadata
|
||||
for name in ["root.json", "targets.json", "snapshot.json", "timestamp.json"]:
|
||||
sign_tuf(args.tuf_dir / name, keyid, key)
|
||||
|
||||
parts = [f"manifest DSSE -> {dsse_path}"]
|
||||
if args.bundle:
|
||||
parts.append(f"bundle DSSE -> {bundle_dsse_path}")
|
||||
if anchor_dsse_path:
|
||||
parts.append(f"time anchor DSSE -> {anchor_dsse_path}")
|
||||
parts.append("TUF metadata updated")
|
||||
print(f"Signed DSSE + TUF using keyid {keyid}; " + ", ".join(parts))
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
77
devops/tools/mirror/verify_oci_layout.py
Normal file
77
devops/tools/mirror/verify_oci_layout.py
Normal file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Verify OCI layout emitted by make-thin-v1.sh when OCI=1.
|
||||
Checks:
|
||||
1) oci-layout exists and version is 1.0.0
|
||||
2) index.json manifest digest/size match manifest.json hash/size
|
||||
3) manifest.json references config/layers present in blobs with matching sha256 and size
|
||||
|
||||
Usage:
|
||||
python scripts/mirror/verify_oci_layout.py out/mirror/thin/oci
|
||||
|
||||
Exit 0 on success, non-zero on failure with message.
|
||||
"""
|
||||
import hashlib, json, pathlib, sys
|
||||
|
||||
def sha256(path: pathlib.Path) -> str:
|
||||
h = hashlib.sha256()
|
||||
with path.open('rb') as f:
|
||||
for chunk in iter(lambda: f.read(8192), b''):
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
def main():
|
||||
if len(sys.argv) != 2:
|
||||
print(__doc__)
|
||||
sys.exit(2)
|
||||
root = pathlib.Path(sys.argv[1])
|
||||
layout = root / "oci-layout"
|
||||
index = root / "index.json"
|
||||
manifest = root / "manifest.json"
|
||||
if not layout.exists() or not index.exists() or not manifest.exists():
|
||||
raise SystemExit("missing oci-layout/index.json/manifest.json")
|
||||
|
||||
layout_obj = json.loads(layout.read_text())
|
||||
if layout_obj.get("imageLayoutVersion") != "1.0.0":
|
||||
raise SystemExit("oci-layout version not 1.0.0")
|
||||
|
||||
idx_obj = json.loads(index.read_text())
|
||||
if not idx_obj.get("manifests"):
|
||||
raise SystemExit("index.json manifests empty")
|
||||
man_digest = idx_obj["manifests"][0]["digest"]
|
||||
man_size = idx_obj["manifests"][0]["size"]
|
||||
|
||||
actual_man_sha = sha256(manifest)
|
||||
if man_digest != f"sha256:{actual_man_sha}":
|
||||
raise SystemExit(f"manifest digest mismatch: {man_digest} vs sha256:{actual_man_sha}")
|
||||
if man_size != manifest.stat().st_size:
|
||||
raise SystemExit("manifest size mismatch")
|
||||
|
||||
man_obj = json.loads(manifest.read_text())
|
||||
blobs = root / "blobs" / "sha256"
|
||||
# config
|
||||
cfg_digest = man_obj["config"]["digest"].split(":",1)[1]
|
||||
cfg_size = man_obj["config"]["size"]
|
||||
cfg_path = blobs / cfg_digest
|
||||
if not cfg_path.exists():
|
||||
raise SystemExit(f"config blob missing: {cfg_path}")
|
||||
if cfg_path.stat().st_size != cfg_size:
|
||||
raise SystemExit("config size mismatch")
|
||||
if sha256(cfg_path) != cfg_digest:
|
||||
raise SystemExit("config digest mismatch")
|
||||
|
||||
for layer in man_obj.get("layers", []):
|
||||
ldigest = layer["digest"].split(":",1)[1]
|
||||
lsize = layer["size"]
|
||||
lpath = blobs / ldigest
|
||||
if not lpath.exists():
|
||||
raise SystemExit(f"layer blob missing: {lpath}")
|
||||
if lpath.stat().st_size != lsize:
|
||||
raise SystemExit("layer size mismatch")
|
||||
if sha256(lpath) != ldigest:
|
||||
raise SystemExit("layer digest mismatch")
|
||||
|
||||
print("OK: OCI layout verified")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
293
devops/tools/mirror/verify_thin_bundle.py
Normal file
293
devops/tools/mirror/verify_thin_bundle.py
Normal file
@@ -0,0 +1,293 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Verifier for mirror-thin-v1 artefacts and bundle meta.
|
||||
|
||||
Checks:
|
||||
1) SHA256 of manifest/tarball (and optional bundle meta) matches sidecars.
|
||||
2) Manifest schema contains required fields and required layer files exist.
|
||||
3) Tarball headers deterministic (sorted paths, uid/gid=0, mtime=0).
|
||||
4) Tar contents match manifest digests.
|
||||
5) Optional: verify DSSE signatures for manifest/bundle when a public key is provided.
|
||||
6) Optional: validate bundle meta (tenant/env scope, policy hashes, gap coverage counts).
|
||||
|
||||
Usage:
|
||||
python scripts/mirror/verify_thin_bundle.py \
|
||||
out/mirror/thin/mirror-thin-v1.manifest.json \
|
||||
out/mirror/thin/mirror-thin-v1.tar.gz \
|
||||
--bundle-meta out/mirror/thin/mirror-thin-v1.bundle.json \
|
||||
--pubkey out/mirror/thin/tuf/keys/ci-ed25519.pub \
|
||||
--tenant tenant-demo --environment lab
|
||||
|
||||
Exit code 0 on success; non-zero on any check failure.
|
||||
"""
|
||||
import argparse
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
import tarfile
|
||||
from typing import Optional
|
||||
|
||||
try:
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PublicKey
|
||||
|
||||
CRYPTO_AVAILABLE = True
|
||||
except ImportError: # pragma: no cover - surfaced as runtime guidance
|
||||
CRYPTO_AVAILABLE = False
|
||||
|
||||
REQUIRED_FIELDS = ["version", "created", "layers", "indexes"]
|
||||
REQUIRED_LAYER_FILES = {
|
||||
"layers/observations.ndjson",
|
||||
"layers/time-anchor.json",
|
||||
"layers/transport-plan.json",
|
||||
"layers/rekor-policy.json",
|
||||
"layers/mirror-policy.json",
|
||||
"layers/offline-kit-policy.json",
|
||||
"layers/artifact-hashes.json",
|
||||
"indexes/observations.index",
|
||||
}
|
||||
|
||||
|
||||
def _b64url_decode(data: str) -> bytes:
|
||||
padding = "=" * (-len(data) % 4)
|
||||
return base64.urlsafe_b64decode(data + padding)
|
||||
|
||||
|
||||
def sha256_file(path: pathlib.Path) -> str:
|
||||
h = hashlib.sha256()
|
||||
with path.open("rb") as f:
|
||||
for chunk in iter(lambda: f.read(8192), b""):
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def load_sha256_sidecar(path: pathlib.Path) -> str:
|
||||
sidecar = path.with_suffix(path.suffix + ".sha256")
|
||||
if not sidecar.exists():
|
||||
raise SystemExit(f"missing sidecar {sidecar}")
|
||||
return sidecar.read_text().strip().split()[0]
|
||||
|
||||
|
||||
def check_schema(manifest: dict):
|
||||
missing = [f for f in REQUIRED_FIELDS if f not in manifest]
|
||||
if missing:
|
||||
raise SystemExit(f"manifest missing fields: {missing}")
|
||||
|
||||
|
||||
def normalize(name: str) -> str:
|
||||
return name[2:] if name.startswith("./") else name
|
||||
|
||||
|
||||
def check_tar_determinism(tar_path: pathlib.Path):
|
||||
with tarfile.open(tar_path, "r:gz") as tf:
|
||||
names = [normalize(n) for n in tf.getnames()]
|
||||
if names != sorted(names):
|
||||
raise SystemExit("tar entries not sorted")
|
||||
for m in tf.getmembers():
|
||||
if m.uid != 0 or m.gid != 0:
|
||||
raise SystemExit(f"tar header uid/gid not zero for {m.name}")
|
||||
if m.mtime != 0:
|
||||
raise SystemExit(f"tar header mtime not zero for {m.name}")
|
||||
|
||||
|
||||
def check_required_layers(tar_path: pathlib.Path):
|
||||
with tarfile.open(tar_path, "r:gz") as tf:
|
||||
names = {normalize(n) for n in tf.getnames()}
|
||||
for required in REQUIRED_LAYER_FILES:
|
||||
if required not in names:
|
||||
raise SystemExit(f"required file missing from bundle: {required}")
|
||||
|
||||
|
||||
def check_content_hashes(manifest: dict, tar_path: pathlib.Path):
|
||||
with tarfile.open(tar_path, "r:gz") as tf:
|
||||
def get(name: str):
|
||||
try:
|
||||
return tf.getmember(name)
|
||||
except KeyError:
|
||||
return tf.getmember(f"./{name}")
|
||||
for layer in manifest.get("layers", []):
|
||||
name = layer["path"]
|
||||
info = get(name)
|
||||
data = tf.extractfile(info).read()
|
||||
digest = hashlib.sha256(data).hexdigest()
|
||||
if layer["digest"] != f"sha256:{digest}":
|
||||
raise SystemExit(f"layer digest mismatch {name}: {digest}")
|
||||
for idx in manifest.get("indexes", []):
|
||||
name = idx['name']
|
||||
if not name.startswith("indexes/"):
|
||||
name = f"indexes/{name}"
|
||||
info = get(name)
|
||||
data = tf.extractfile(info).read()
|
||||
digest = hashlib.sha256(data).hexdigest()
|
||||
if idx["digest"] != f"sha256:{digest}":
|
||||
raise SystemExit(f"index digest mismatch {name}: {digest}")
|
||||
|
||||
|
||||
def read_tar_entry(tar_path: pathlib.Path, name: str) -> bytes:
|
||||
with tarfile.open(tar_path, "r:gz") as tf:
|
||||
try:
|
||||
info = tf.getmember(name)
|
||||
except KeyError:
|
||||
info = tf.getmember(f"./{name}")
|
||||
data = tf.extractfile(info).read()
|
||||
return data
|
||||
|
||||
|
||||
def load_pubkey(path: pathlib.Path) -> Ed25519PublicKey:
|
||||
if not CRYPTO_AVAILABLE:
|
||||
raise SystemExit("cryptography is required for DSSE verification; install before using --pubkey")
|
||||
return serialization.load_pem_public_key(path.read_bytes())
|
||||
|
||||
|
||||
def verify_dsse(dsse_path: pathlib.Path, pubkey_path: pathlib.Path, expected_payload: pathlib.Path, expected_type: str):
|
||||
dsse_obj = json.loads(dsse_path.read_text())
|
||||
if dsse_obj.get("payloadType") != expected_type:
|
||||
raise SystemExit(f"DSSE payloadType mismatch for {dsse_path}")
|
||||
payload = _b64url_decode(dsse_obj.get("payload", ""))
|
||||
if payload != expected_payload.read_bytes():
|
||||
raise SystemExit(f"DSSE payload mismatch for {dsse_path}")
|
||||
sigs = dsse_obj.get("signatures") or []
|
||||
if not sigs:
|
||||
raise SystemExit(f"DSSE missing signatures: {dsse_path}")
|
||||
pub = load_pubkey(pubkey_path)
|
||||
try:
|
||||
pub.verify(_b64url_decode(sigs[0]["sig"]), payload)
|
||||
except Exception as exc: # pragma: no cover - cryptography raises InvalidSignature
|
||||
raise SystemExit(f"DSSE signature verification failed for {dsse_path}: {exc}")
|
||||
|
||||
|
||||
def check_bundle_meta(meta_path: pathlib.Path, manifest_path: pathlib.Path, tar_path: pathlib.Path, tenant: Optional[str], environment: Optional[str]):
|
||||
meta = json.loads(meta_path.read_text())
|
||||
for field in ["bundle", "version", "artifacts", "gaps", "tooling"]:
|
||||
if field not in meta:
|
||||
raise SystemExit(f"bundle meta missing field {field}")
|
||||
if tenant and meta.get("tenant") != tenant:
|
||||
raise SystemExit(f"bundle tenant mismatch: {meta.get('tenant')} != {tenant}")
|
||||
if environment and meta.get("environment") != environment:
|
||||
raise SystemExit(f"bundle environment mismatch: {meta.get('environment')} != {environment}")
|
||||
|
||||
artifacts = meta["artifacts"]
|
||||
|
||||
def expect(name: str, path: pathlib.Path):
|
||||
recorded = artifacts.get(name)
|
||||
if not recorded:
|
||||
raise SystemExit(f"bundle meta missing artifact entry: {name}")
|
||||
expected = recorded.get("sha256")
|
||||
if expected and expected != sha256_file(path):
|
||||
raise SystemExit(f"bundle meta digest mismatch for {name}")
|
||||
|
||||
expect("manifest", manifest_path)
|
||||
expect("tarball", tar_path)
|
||||
# DSSE sidecars are optional but if present, validate hashes
|
||||
dsse_manifest = artifacts.get("manifest_dsse")
|
||||
if dsse_manifest and dsse_manifest.get("path"):
|
||||
expect("manifest_dsse", meta_path.parent / dsse_manifest["path"])
|
||||
dsse_bundle = artifacts.get("bundle_dsse")
|
||||
if dsse_bundle and dsse_bundle.get("path"):
|
||||
expect("bundle_dsse", meta_path.parent / dsse_bundle["path"])
|
||||
dsse_anchor = artifacts.get("time_anchor_dsse")
|
||||
if dsse_anchor and dsse_anchor.get("path"):
|
||||
expect("time_anchor_dsse", meta_path.parent / dsse_anchor["path"])
|
||||
for extra in ["time_anchor", "transport_plan", "rekor_policy", "mirror_policy", "offline_policy", "artifact_hashes"]:
|
||||
rec = artifacts.get(extra)
|
||||
if not rec:
|
||||
            raise SystemExit(f"bundle meta missing artifact entry: {extra}")
        if not rec.get("path"):
            raise SystemExit(f"bundle meta missing path for {extra}")

    time_anchor_dsse = artifacts.get("time_anchor_dsse")
    if time_anchor_dsse:
        if not time_anchor_dsse.get("path"):
            raise SystemExit("bundle meta missing path for time_anchor_dsse")
        if not (meta_path.parent / time_anchor_dsse["path"]).exists():
            raise SystemExit("time_anchor_dsse referenced but file missing")

    for group, expected_count in [("ok", 10), ("rk", 10), ("ms", 10)]:
        if len(meta.get("gaps", {}).get(group, [])) != expected_count:
            raise SystemExit(f"bundle meta gaps.{group} expected {expected_count} entries")

    root_guess = manifest_path.parents[3] if len(manifest_path.parents) > 3 else manifest_path.parents[-1]
    tool_expectations = {
        'make_thin_v1_sh': root_guess / 'src' / 'Mirror' / 'StellaOps.Mirror.Creator' / 'make-thin-v1.sh',
        'sign_script': root_guess / 'scripts' / 'mirror' / 'sign_thin_bundle.py',
        'verify_script': root_guess / 'scripts' / 'mirror' / 'verify_thin_bundle.py',
        'verify_oci': root_guess / 'scripts' / 'mirror' / 'verify_oci_layout.py'
    }
    for key, path in tool_expectations.items():
        recorded = meta['tooling'].get(key)
        if not recorded:
            raise SystemExit(f"tool hash missing for {key}")
        actual = sha256_file(path)
        if recorded != actual:
            raise SystemExit(f"tool hash mismatch for {key}")

    if meta.get("checkpoint_freshness_seconds", 0) <= 0:
        raise SystemExit("checkpoint_freshness_seconds must be positive")


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("manifest", type=pathlib.Path)
    parser.add_argument("tar", type=pathlib.Path)
    parser.add_argument("--bundle-meta", type=pathlib.Path)
    parser.add_argument("--pubkey", type=pathlib.Path)
    parser.add_argument("--tenant", type=str)
    parser.add_argument("--environment", type=str)
    args = parser.parse_args()

    manifest_path = args.manifest
    tar_path = args.tar
    bundle_meta = args.bundle_meta
    bundle_dsse = bundle_meta.with_suffix(".dsse.json") if bundle_meta else None
    manifest_dsse = manifest_path.with_suffix(".dsse.json")
    time_anchor_dsse = None
    time_anchor_path = tar_path.parent / "stage-v1" / "layers" / "time-anchor.json"

    man_expected = load_sha256_sidecar(manifest_path)
    tar_expected = load_sha256_sidecar(tar_path)
    if sha256_file(manifest_path) != man_expected:
        raise SystemExit("manifest sha256 mismatch")
    if sha256_file(tar_path) != tar_expected:
        raise SystemExit("tarball sha256 mismatch")

    manifest = json.loads(manifest_path.read_text())
    check_schema(manifest)
    check_tar_determinism(tar_path)
    check_required_layers(tar_path)
    check_content_hashes(manifest, tar_path)

    if bundle_meta:
        if not bundle_meta.exists():
            raise SystemExit(f"bundle meta missing: {bundle_meta}")
        meta_expected = load_sha256_sidecar(bundle_meta)
        if sha256_file(bundle_meta) != meta_expected:
            raise SystemExit("bundle meta sha256 mismatch")
        check_bundle_meta(bundle_meta, manifest_path, tar_path, args.tenant, args.environment)
        meta = json.loads(bundle_meta.read_text())
        ta_entry = meta.get("artifacts", {}).get("time_anchor_dsse")
        if ta_entry and ta_entry.get("path"):
            ta_path = bundle_meta.parent / ta_entry["path"]
            if sha256_file(ta_path) != ta_entry.get("sha256"):
                raise SystemExit("time_anchor_dsse sha256 mismatch")
            time_anchor_dsse = ta_path

    if args.pubkey:
        pubkey = args.pubkey
        if manifest_dsse.exists():
            verify_dsse(manifest_dsse, pubkey, manifest_path, "application/vnd.stellaops.mirror.manifest+json")
        if bundle_dsse and bundle_dsse.exists():
            verify_dsse(bundle_dsse, pubkey, bundle_meta, "application/vnd.stellaops.mirror.bundle+json")
        if time_anchor_dsse and time_anchor_dsse.exists() and time_anchor_path.exists():
            anchor_bytes = read_tar_entry(tar_path, "layers/time-anchor.json")
            tmp_anchor = tar_path.parent / "time-anchor.verify.json"
            tmp_anchor.write_bytes(anchor_bytes)
            verify_dsse(time_anchor_dsse, pubkey, tmp_anchor, "application/vnd.stellaops.time-anchor+json")
            tmp_anchor.unlink(missing_ok=True)

    print("OK: mirror-thin bundle verified")


if __name__ == "__main__":
    main()
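The tool-hash check above expects bundle meta to carry a tooling map keyed exactly like tool_expectations, holding SHA-256 digests of the producer scripts. A minimal producer-side sketch that would satisfy it; the repo-root path and the print-only output are assumptions for illustration, not necessarily how the real bundle builder populates bundle meta:

#!/usr/bin/env python3
# Sketch: emit the `tooling` block of bundle meta so the verifier's hash check passes.
# Assumption: run from the repository root; merging the output into bundle meta is up to the producer.
import hashlib
import json
import pathlib

def sha256_file(path: pathlib.Path) -> str:
    digest = hashlib.sha256()
    with path.open("rb") as handle:
        for chunk in iter(lambda: handle.read(1 << 16), b""):
            digest.update(chunk)
    return digest.hexdigest()

repo_root = pathlib.Path(".")  # assumption: repository root
tool_paths = {
    "make_thin_v1_sh": repo_root / "src/Mirror/StellaOps.Mirror.Creator/make-thin-v1.sh",
    "sign_script": repo_root / "scripts/mirror/sign_thin_bundle.py",
    "verify_script": repo_root / "scripts/mirror/verify_thin_bundle.py",
    "verify_oci": repo_root / "scripts/mirror/verify_oci_layout.py",
}
tooling = {key: sha256_file(path) for key, path in tool_paths.items()}
print(json.dumps({"tooling": tooling}, indent=2))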
143
devops/tools/notifications/sign-dsse.py
Normal file
143
devops/tools/notifications/sign-dsse.py
Normal file
@@ -0,0 +1,143 @@
#!/usr/bin/env python3
"""
DSSE signing utility for notification schemas and offline kit manifests.

Uses HMAC-SHA256 over a Pre-Authentication Encoding (PAE) framing (length-prefixed binary variant of the DSSE PAE).
Development key: etc/secrets/dsse-dev.signing.json
CI/Production: Use secrets.COSIGN_KEY_REF or equivalent HSM-backed key.

Usage:
    python3 devops/tools/notifications/sign-dsse.py <input.dsse.json> [--key <key-file>] [--output <output.dsse.json>]
    python3 devops/tools/notifications/sign-dsse.py docs/notifications/schemas/notify-schemas-catalog.dsse.json
"""

import argparse
import base64
import hashlib
import hmac
import json
import struct
import sys
from datetime import datetime, timezone
from pathlib import Path


def build_pae(payload_type: str, payload_bytes: bytes) -> bytes:
    """Build the PAE framing signed by this tool (length-prefixed binary variant of DSSE PAE)."""
    prefix = b"DSSEv1"
    type_bytes = payload_type.encode("utf-8") if payload_type else b""

    # Framing: "DSSEv1" + count (uint64 BE, = 2) + len(type) (uint64 BE) + type + len(payload) (uint64 BE) + payload
    pae = (
        prefix +
        struct.pack(">Q", 2) +  # count = 2 (type + payload)
        struct.pack(">Q", len(type_bytes)) +
        type_bytes +
        struct.pack(">Q", len(payload_bytes)) +
        payload_bytes
    )
    return pae


def compute_hmac_signature(secret_b64: str, pae: bytes) -> str:
    """Compute HMAC-SHA256 signature and return base64."""
    secret_bytes = base64.b64decode(secret_b64)
    signature = hmac.new(secret_bytes, pae, hashlib.sha256).digest()
    return base64.b64encode(signature).decode("utf-8")


def load_key(key_path: Path) -> dict:
    """Load signing key from JSON file."""
    with open(key_path, "r", encoding="utf-8") as f:
        key_data = json.load(f)

    required = ["keyId", "secret", "algorithm"]
    for field in required:
        if field not in key_data:
            raise ValueError(f"Key file missing required field: {field}")

    if key_data["algorithm"].upper() != "HMACSHA256":
        raise ValueError(f"Unsupported algorithm: {key_data['algorithm']}")

    return key_data


def sign_dsse(input_path: Path, key_data: dict, output_path: Path | None = None) -> dict:
    """Sign a DSSE envelope file."""
    with open(input_path, "r", encoding="utf-8") as f:
        envelope = json.load(f)

    if "payloadType" not in envelope or "payload" not in envelope:
        raise ValueError("Input file is not a valid DSSE envelope (missing payloadType or payload)")

    payload_type = envelope["payloadType"]
    payload_b64 = envelope["payload"]
    payload_bytes = base64.b64decode(payload_b64)

    # Build PAE and compute signature
    pae = build_pae(payload_type, payload_bytes)
    signature = compute_hmac_signature(key_data["secret"], pae)

    # Create signature object
    sig_obj = {
        "sig": signature,
        "keyid": key_data["keyId"]
    }

    # Add timestamp if not already present
    if "signedAt" not in sig_obj:
        sig_obj["signedAt"] = datetime.now(timezone.utc).isoformat(timespec="seconds")

    # Update envelope with signature
    if "signatures" not in envelope or not envelope["signatures"]:
        envelope["signatures"] = []

    # Remove any existing signature with the same keyId
    envelope["signatures"] = [s for s in envelope["signatures"] if s.get("keyid") != key_data["keyId"]]
    envelope["signatures"].append(sig_obj)

    # Remove note field if present (was a placeholder)
    envelope.pop("note", None)

    # Write output
    out_path = output_path or input_path
    with open(out_path, "w", encoding="utf-8") as f:
        json.dump(envelope, f, indent=2, ensure_ascii=False)
        f.write("\n")

    return envelope


def main():
    parser = argparse.ArgumentParser(description="Sign DSSE envelope files with HMAC-SHA256")
    parser.add_argument("input", type=Path, help="Input DSSE envelope file")
    parser.add_argument("--key", "-k", type=Path,
                        default=Path("etc/secrets/dsse-dev.signing.json"),
                        help="Signing key JSON file (default: etc/secrets/dsse-dev.signing.json)")
    parser.add_argument("--output", "-o", type=Path, help="Output file (default: overwrite input)")

    args = parser.parse_args()

    if not args.input.exists():
        print(f"Error: Input file not found: {args.input}", file=sys.stderr)
        sys.exit(1)

    if not args.key.exists():
        print(f"Error: Key file not found: {args.key}", file=sys.stderr)
        sys.exit(1)

    try:
        key_data = load_key(args.key)
        result = sign_dsse(args.input, key_data, args.output)
        out_path = args.output or args.input
        sig = result["signatures"][-1]
        print(f"Signed {args.input} with key {sig['keyid']}")
        print(f" Signature: {sig['sig'][:32]}...")
        print(f" Output: {out_path}")
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
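For the development HMAC path, a matching verifier just rebuilds the same PAE framing and compares digests using the key file's secret. A minimal sketch, assuming the keyId/secret/algorithm layout loaded by load_key(); the script name and CLI are illustrative, and this does not cover the cosign/HSM production path:

#!/usr/bin/env python3
# Sketch: verify an HMAC signature produced by sign-dsse.py (same framing, same key file layout).
import base64
import hashlib
import hmac
import json
import struct
import sys
from pathlib import Path

def build_pae(payload_type: str, payload_bytes: bytes) -> bytes:
    # Must match sign-dsse.py: "DSSEv1" + uint64 count + uint64 len(type) + type + uint64 len(payload) + payload
    type_bytes = payload_type.encode("utf-8") if payload_type else b""
    return (
        b"DSSEv1"
        + struct.pack(">Q", 2)
        + struct.pack(">Q", len(type_bytes))
        + type_bytes
        + struct.pack(">Q", len(payload_bytes))
        + payload_bytes
    )

def verify(envelope_path: Path, key_path: Path) -> bool:
    envelope = json.loads(envelope_path.read_text(encoding="utf-8"))
    key = json.loads(key_path.read_text(encoding="utf-8"))
    pae = build_pae(envelope["payloadType"], base64.b64decode(envelope["payload"]))
    expected = hmac.new(base64.b64decode(key["secret"]), pae, hashlib.sha256).digest()
    for sig in envelope.get("signatures", []):
        if sig.get("keyid") == key["keyId"] and hmac.compare_digest(base64.b64decode(sig["sig"]), expected):
            return True
    return False

if __name__ == "__main__":
    ok = verify(Path(sys.argv[1]), Path(sys.argv[2]))
    print("signature valid" if ok else "signature INVALID")
    sys.exit(0 if ok else 1)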
31
devops/tools/nuget-prime/mirror-packages.txt
Normal file
31
devops/tools/nuget-prime/mirror-packages.txt
Normal file
@@ -0,0 +1,31 @@
AWSSDK.S3|3.7.305.6
CycloneDX.Core|10.0.1
Google.Protobuf|3.27.2
Grpc.Net.Client|2.65.0
Grpc.Tools|2.65.0
Microsoft.Data.Sqlite|9.0.0-rc.1.24451.1
Microsoft.Extensions.Configuration.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Configuration.Abstractions|9.0.0
Microsoft.Extensions.Configuration.Binder|10.0.0-rc.2.25502.107
Microsoft.Extensions.DependencyInjection.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.DependencyInjection.Abstractions|9.0.0
Microsoft.Extensions.Diagnostics.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Diagnostics.HealthChecks|10.0.0-rc.2.25502.107
Microsoft.Extensions.Hosting.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Http.Polly|10.0.0-rc.2.25502.107
Microsoft.Extensions.Http|10.0.0-rc.2.25502.107
Microsoft.Extensions.Logging.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Logging.Abstractions|9.0.0
Microsoft.Extensions.Options.ConfigurationExtensions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Options|10.0.0-rc.2.25502.107
Microsoft.Extensions.Options|9.0.0
MongoDB.Driver|3.5.0
NATS.Client.Core|2.0.0
NATS.Client.JetStream|2.0.0
RoaringBitmap|0.0.9
Serilog.AspNetCore|8.0.1
Serilog.Extensions.Hosting|8.0.0
Serilog.Sinks.Console|5.0.1
StackExchange.Redis|2.7.33
System.Text.Json|10.0.0-preview.7.25380.108
14
devops/tools/nuget-prime/nuget-prime-v9.csproj
Normal file
14
devops/tools/nuget-prime/nuget-prime-v9.csproj
Normal file
@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <RestorePackagesPath>../../local-nugets/packages</RestorePackagesPath>
    <DisableImplicitFrameworkReferences>true</DisableImplicitFrameworkReferences>
    <EnableDefaultItems>false</EnableDefaultItems>
  </PropertyGroup>
  <ItemGroup>
    <PackageDownload Include="Microsoft.Extensions.Configuration.Abstractions" Version="[9.0.0]" />
    <PackageDownload Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="[9.0.0]" />
    <PackageDownload Include="Microsoft.Extensions.Logging.Abstractions" Version="[9.0.0]" />
    <PackageDownload Include="Microsoft.Extensions.Options" Version="[9.0.0]" />
  </ItemGroup>
</Project>
45
devops/tools/nuget-prime/nuget-prime.csproj
Normal file
45
devops/tools/nuget-prime/nuget-prime.csproj
Normal file
@@ -0,0 +1,45 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <RestorePackagesPath>../../local-nugets/packages</RestorePackagesPath>
    <DisableImplicitFrameworkReferences>true</DisableImplicitFrameworkReferences>
    <EnableDefaultItems>false</EnableDefaultItems>
  </PropertyGroup>
  <ItemGroup>
    <PackageDownload Include="AWSSDK.Core" Version="[4.0.1.3]" />
    <PackageDownload Include="AWSSDK.KeyManagementService" Version="[4.0.6]" />
    <PackageDownload Include="AWSSDK.S3" Version="[3.7.305.6]" />
    <PackageDownload Include="CycloneDX.Core" Version="[10.0.2]" />
    <PackageDownload Include="Google.Protobuf" Version="[3.27.2]" />
    <PackageDownload Include="Grpc.Net.Client" Version="[2.65.0]" />
    <PackageDownload Include="Grpc.Tools" Version="[2.65.0]" />
    <PackageDownload Include="Microsoft.Data.Sqlite" Version="[9.0.0-rc.1.24451.1]" />
    <PackageDownload Include="Microsoft.Extensions.Configuration.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Configuration.Binder" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Diagnostics.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Hosting.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Http.Polly" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Http" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Logging.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Options" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="NATS.Client.Core" Version="[2.0.0]" />
    <PackageDownload Include="NATS.Client.JetStream" Version="[2.0.0]" />
    <PackageDownload Include="RoaringBitmap" Version="[0.0.9]" />
    <PackageDownload Include="Serilog.AspNetCore" Version="[8.0.1]" />
    <PackageDownload Include="Serilog.Extensions.Hosting" Version="[8.0.0]" />
    <PackageDownload Include="Serilog.Sinks.Console" Version="[5.0.1]" />
    <PackageDownload Include="StackExchange.Redis" Version="[2.8.37]" />
    <PackageDownload Include="System.Text.Json" Version="[10.0.0-preview.7.25380.108]" />
    <PackageDownload Include="Google.Api.CommonProtos" Version="[2.17.0]" />
    <PackageDownload Include="Google.Api.Gax" Version="[4.11.0]" />
    <PackageDownload Include="Google.Api.Gax.Grpc" Version="[4.11.0]" />
    <PackageDownload Include="Google.Api.Gax.Grpc.GrpcCore" Version="[4.11.0]" />
    <PackageDownload Include="Google.Apis" Version="[1.69.0]" />
    <PackageDownload Include="Google.Apis.Auth" Version="[1.69.0]" />
    <PackageDownload Include="Google.Apis.Core" Version="[1.64.0]" />
  </ItemGroup>
</Project>
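mirror-packages.txt and the two prime projects above pin overlapping package sets (the 10.0 RC pins in nuget-prime.csproj, the 9.0.0 pins in nuget-prime-v9.csproj), and a few versions already disagree, e.g. CycloneDX.Core 10.0.1 vs [10.0.2] and StackExchange.Redis 2.7.33 vs [2.8.37]. A small consistency-check sketch, assuming the files stay at the paths added by this commit:

#!/usr/bin/env python3
# Sketch: flag packages whose PackageDownload pin in the nuget-prime projects differs from mirror-packages.txt.
import re
import sys
from pathlib import Path

BASE = Path("devops/tools/nuget-prime")  # paths as added by this commit

def read_list(path: Path) -> set[tuple[str, str]]:
    pins = set()
    for line in path.read_text(encoding="utf-8").splitlines():
        line = line.strip()
        if line and "|" in line:
            name, version = line.split("|", 1)
            pins.add((name, version))
    return pins

def read_csproj(path: Path) -> set[tuple[str, str]]:
    pattern = re.compile(r'PackageDownload Include="([^"]+)" Version="\[([^\]]+)\]"')
    return set(pattern.findall(path.read_text(encoding="utf-8")))

listed = read_list(BASE / "mirror-packages.txt")
pinned = read_csproj(BASE / "nuget-prime.csproj") | read_csproj(BASE / "nuget-prime-v9.csproj")

listed_names = {name for name, _ in listed}
mismatches = sorted(
    (name, version) for name, version in pinned
    if name in listed_names and (name, version) not in listed
)
for name, version in mismatches:
    print(f"version drift: {name} pinned {version} in csproj but not in mirror-packages.txt")
sys.exit(1 if mismatches else 0)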
134
devops/tools/observability/incident-mode.sh
Normal file
134
devops/tools/observability/incident-mode.sh
Normal file
@@ -0,0 +1,134 @@
#!/usr/bin/env bash
set -euo pipefail

# Incident mode automation
# - Enables a feature-flag JSON when burn rate crosses threshold
# - Writes retention override parameters for downstream storage/ingest systems
# - Resets automatically after a cooldown period once burn subsides
# All inputs are provided via CLI flags or env vars to remain offline-friendly.

usage() {
  cat <<'USAGE'
Usage: incident-mode.sh --burn-rate <float> [--threshold 2.0] [--reset-threshold 0.5] \
                        [--state-dir out/incident-mode] [--retention-hours 24] \
                        [--cooldown-mins 30] [--note "text"]

Environment overrides:
  INCIDENT_STATE_DIR    default: out/incident-mode
  INCIDENT_THRESHOLD    default: 2.0 (fast burn multiple)
  INCIDENT_RESET_TH     default: 0.5 (burn multiple to exit)
  INCIDENT_COOLDOWN     default: 30 (minutes below reset threshold)
  INCIDENT_RETENTION_H  default: 24 (hours)

Outputs (in state dir):
  flag.json       feature flag payload (enabled/disabled + metadata)
  retention.json  retention override (hours, applied_at)
  last_burn.txt   last burn rate observed
  cooldown.txt    consecutive minutes below reset threshold

Examples:
  incident-mode.sh --burn-rate 3.1 --note "fast burn"   # enter incident mode
  incident-mode.sh --burn-rate 0.2                      # progress cooldown / exit
USAGE
}

if [[ $# -eq 0 ]]; then usage; exit 1; fi

BURN_RATE=""
NOTE=""
STATE_DIR=${INCIDENT_STATE_DIR:-out/incident-mode}
THRESHOLD=${INCIDENT_THRESHOLD:-2.0}
RESET_TH=${INCIDENT_RESET_TH:-0.5}
COOLDOWN_MINS=${INCIDENT_COOLDOWN:-30}
RETENTION_H=${INCIDENT_RETENTION_H:-24}

while [[ $# -gt 0 ]]; do
  case "$1" in
    --burn-rate) BURN_RATE="$2"; shift 2;;
    --threshold) THRESHOLD="$2"; shift 2;;
    --reset-threshold) RESET_TH="$2"; shift 2;;
    --state-dir) STATE_DIR="$2"; shift 2;;
    --retention-hours) RETENTION_H="$2"; shift 2;;
    --cooldown-mins) COOLDOWN_MINS="$2"; shift 2;;
    --note) NOTE="$2"; shift 2;;
    -h|--help) usage; exit 0;;
    *) echo "Unknown arg: $1" >&2; usage; exit 1;;
  esac
done

if [[ -z "$BURN_RATE" ]]; then echo "--burn-rate is required" >&2; exit 1; fi
mkdir -p "$STATE_DIR"
FLAG_FILE="$STATE_DIR/flag.json"
RET_FILE="$STATE_DIR/retention.json"
LAST_FILE="$STATE_DIR/last_burn.txt"
COOLDOWN_FILE="$STATE_DIR/cooldown.txt"

# JSON-encode a string via python3 (no jq dependency, despite the helper name).
jq_escape() { python3 - "$1" <<'PY'
import json,sys
print(json.dumps(sys.argv[1]))
PY
}

now_utc=$(date -u +%Y-%m-%dT%H:%M:%SZ)
burn_float=$(python3 - "$BURN_RATE" <<'PY'
import sys
print(float(sys.argv[1]))
PY
)

cooldown_current=0
if [[ -f "$COOLDOWN_FILE" ]]; then
  cooldown_current=$(cat "$COOLDOWN_FILE")
fi

enter_incident=false
exit_incident=false

if (( $(echo "$burn_float >= $THRESHOLD" | bc -l) )); then
  enter_incident=true
  cooldown_current=0
elif (( $(echo "$burn_float <= $RESET_TH" | bc -l) )); then
  cooldown_current=$((cooldown_current + 1))
  if (( cooldown_current >= COOLDOWN_MINS )); then
    exit_incident=true
  fi
else
  cooldown_current=0
fi

echo "$burn_float" > "$LAST_FILE"
echo "$cooldown_current" > "$COOLDOWN_FILE"

write_flag() {
  local enabled="$1"
  cat > "$FLAG_FILE" <<JSON
{
  "enabled": $enabled,
  "updated_at": "$now_utc",
  "reason": "incident-mode",
  "note": $(jq_escape "$NOTE"),
  "burn_rate": $burn_float
}
JSON
}

if $enter_incident; then
  write_flag true
  cat > "$RET_FILE" <<JSON
{
  "retention_hours": $RETENTION_H,
  "applied_at": "$now_utc"
}
JSON
  echo "incident-mode: activated (burn_rate=$burn_float)" >&2
elif $exit_incident; then
  write_flag false
  echo "incident-mode: cleared after cooldown (burn_rate=$burn_float)" >&2
else
  # no change; preserve prior flag if exists
  if [[ ! -f "$FLAG_FILE" ]]; then
    write_flag false
  fi
  echo "incident-mode: steady (burn_rate=$burn_float, cooldown=$cooldown_current/$COOLDOWN_MINS)" >&2
fi

exit 0
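Downstream jobs only need the JSON this script writes. A minimal consumer sketch, assuming the default state directory out/incident-mode and the flag.json / retention.json fields produced above; the 24h fallback mirrors the INCIDENT_RETENTION_H default:

#!/usr/bin/env python3
# Sketch: read incident-mode state written by incident-mode.sh and decide whether
# to apply the retention override. State directory defaults to out/incident-mode.
import json
import sys
from pathlib import Path

state_dir = Path(sys.argv[1]) if len(sys.argv) > 1 else Path("out/incident-mode")
flag_file = state_dir / "flag.json"
retention_file = state_dir / "retention.json"

if not flag_file.exists():
    print("incident-mode: no state yet; using normal retention")
    sys.exit(0)

flag = json.loads(flag_file.read_text(encoding="utf-8"))
if flag.get("enabled"):
    retention = json.loads(retention_file.read_text(encoding="utf-8")) if retention_file.exists() else {}
    hours = retention.get("retention_hours", 24)  # assumption: fall back to the script's default
    print(f"incident-mode ACTIVE since {flag.get('updated_at')} (burn_rate={flag.get('burn_rate')}); "
          f"apply retention override of {hours}h")
else:
    print("incident-mode inactive; using normal retention")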
21
devops/tools/observability/slo-evaluator.sh
Normal file
21
devops/tools/observability/slo-evaluator.sh
Normal file
@@ -0,0 +1,21 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-OBS-51-001: simple SLO burn-rate evaluator

PROM_URL=${PROM_URL:-"http://localhost:9090"}
OUT="out/obs-slo"
mkdir -p "$OUT"

query() {
  local q="$1"
  curl -sG "${PROM_URL}/api/v1/query" --data-urlencode "query=${q}"
}

echo "[slo] querying error rate (5m)"
query "(rate(service_request_errors_total[5m]) / rate(service_requests_total[5m]))" > "${OUT}/error-rate-5m.json"

echo "[slo] querying error rate (1h)"
query "(rate(service_request_errors_total[1h]) / rate(service_requests_total[1h]))" > "${OUT}/error-rate-1h.json"

echo "[slo] done; results in ${OUT}"
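The evaluator only dumps raw query results; turning them into the burn-rate multiple consumed by incident-mode.sh is left to the caller. A sketch, assuming the standard Prometheus instant-query JSON shape and a hypothetical 99.9% availability SLO (error budget 0.001):

#!/usr/bin/env python3
# Sketch: derive a burn-rate multiple from the JSON written by slo-evaluator.sh.
# ERROR_BUDGET encodes a hypothetical 99.9% SLO; replace with the real target.
import json
import sys
from pathlib import Path

ERROR_BUDGET = 0.001  # assumption: 99.9% availability SLO

def first_value(result_file: Path) -> float:
    data = json.loads(result_file.read_text(encoding="utf-8"))
    series = data.get("data", {}).get("result", [])
    if not series:
        return 0.0
    # Prometheus instant vectors carry [timestamp, "value"] pairs.
    return float(series[0]["value"][1])

out_dir = Path(sys.argv[1]) if len(sys.argv) > 1 else Path("out/obs-slo")
error_rate_5m = first_value(out_dir / "error-rate-5m.json")
burn_rate = error_rate_5m / ERROR_BUDGET if ERROR_BUDGET else 0.0
print(f"{burn_rate:.2f}")

The printed multiple can then be passed to incident-mode.sh --burn-rate.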
19
devops/tools/observability/streaming-validate.sh
Normal file
19
devops/tools/observability/streaming-validate.sh
Normal file
@@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-OBS-52-001: validate streaming pipeline knobs

OUT="out/obs-stream"
mkdir -p "$OUT"

echo "[obs-stream] checking NATS connectivity"
if command -v nats >/dev/null 2>&1; then
  nats --server "${NATS_URL:-nats://localhost:4222}" req health.ping ping || true
else
  echo "nats CLI not installed; skipping connectivity check" > "${OUT}/nats.txt"
fi

echo "[obs-stream] dumping retention/partitions (Kafka-like env variables)"
{ env | grep -E 'KAFKA_|REDIS_|NATS_' || true; } | sort > "${OUT}/env.txt"

echo "[obs-stream] done; outputs in $OUT"
28
devops/tools/offline-tools/fetch-sbomservice-deps.sh
Normal file
28
devops/tools/offline-tools/fetch-sbomservice-deps.sh
Normal file
@@ -0,0 +1,28 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
PACKAGES_DIR="$ROOT/.nuget/packages"
TMP_DIR="$ROOT/tmp/sbomservice-feed"
PROJECT="$TMP_DIR/probe.csproj"

mkdir -p "$TMP_DIR" "$PACKAGES_DIR"

cat > "$PROJECT" <<'CS'
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.IdentityModel.Tokens" Version="8.14.0" />
    <PackageReference Include="Pkcs11Interop" Version="4.1.0" />
  </ItemGroup>
</Project>
CS

dotnet restore "$PROJECT" \
  --packages "$PACKAGES_DIR" \
  --ignore-failed-sources \
  /p:RestoreUseStaticGraphEvaluation=true \
  /p:RestorePackagesWithLockFile=false

find "$PACKAGES_DIR" -maxdepth 5 -type f -name '*.nupkg' -printf '%P\n' | sort
BIN
devops/tools/openssl1.1/lib/libcrypto.so.1.1
Normal file
BIN
devops/tools/openssl1.1/lib/libcrypto.so.1.1
Normal file
Binary file not shown.
BIN
devops/tools/openssl1.1/lib/libssl.so.1.1
Normal file
BIN
devops/tools/openssl1.1/lib/libssl.so.1.1
Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff.