Refactor code structure for improved readability and maintainability; removed redundant code blocks and optimized function calls.
This commit is contained in:
15
scripts/__fixtures__/api-compat/new.yaml
Normal file
15
scripts/__fixtures__/api-compat/new.yaml
Normal file
@@ -0,0 +1,15 @@
|
||||
# Fixture: "new" side of the api-compat diff tests.
# Relative to old.yaml it bumps the version, swaps GET /foo's 200 response for
# 201 (one additive + one breaking response), and adds GET /bar (additive op).
openapi: 3.1.0
info:
  title: Demo API
  version: 1.1.0
paths:
  /foo:
    get:
      responses:
        "201":
          description: created
  /bar:
    get:
      responses:
        "200":
          description: ok
|
||||
10
scripts/__fixtures__/api-compat/old.yaml
Normal file
10
scripts/__fixtures__/api-compat/old.yaml
Normal file
@@ -0,0 +1,10 @@
|
||||
# Fixture: "old" (baseline) side of the api-compat diff tests.
# Declares a single operation, GET /foo, returning 200.
openapi: 3.1.0
info:
  title: Demo API
  version: 1.0.0
paths:
  /foo:
    get:
      responses:
        "200":
          description: ok
|
||||
90
scripts/api-changelog.mjs
Normal file
90
scripts/api-changelog.mjs
Normal file
@@ -0,0 +1,90 @@
|
||||
#!/usr/bin/env node
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import yaml from 'yaml';
|
||||
|
||||
// Repo-relative locations of the OpenAPI specs and the generated changelog.
const ROOT = path.resolve('src/Api/StellaOps.Api.OpenApi');
// Frozen snapshot of the last published spec (may not exist yet).
const BASELINE = path.join(ROOT, 'baselines', 'stella-baseline.yaml');
// Current working spec.
const CURRENT = path.join(ROOT, 'stella.yaml');
// Markdown changelog destination written by main().
const OUTPUT = path.join(ROOT, 'CHANGELOG.md');
|
||||
|
||||
/**
 * Print a prefixed error message to stderr and abort with exit code 1.
 * @param {string} message - Human-readable failure description.
 */
function panic(message) {
  const prefixed = `[api:changelog] ${message}`;
  console.error(prefixed);
  process.exit(1);
}
|
||||
|
||||
/**
 * Read and parse a YAML spec from disk; aborts via panic() when missing.
 * @param {string} file - Path to the YAML document.
 * @returns {object} Parsed spec.
 */
function loadSpec(file) {
  if (fs.existsSync(file) === false) {
    panic(`Spec not found: ${file}`);
  }
  const raw = fs.readFileSync(file, 'utf8');
  return yaml.parse(raw);
}
|
||||
|
||||
// Recognized HTTP verbs; hoisted so the membership check is O(1) and the
// list is not rebuilt on every method of every path (the original allocated
// the array inside the inner loop).
const HTTP_METHODS = new Set(['get', 'post', 'put', 'delete', 'patch', 'head', 'options', 'trace']);

/**
 * Flatten spec.paths into a Map of "METHOD /route" -> operation object.
 * Non-verb keys under a path item (e.g. `parameters`, `summary`) are
 * skipped; a spec without `paths` yields an empty map.
 * @param {object} spec - Parsed OpenAPI document.
 * @returns {Map<string, object>}
 */
function enumerateOps(spec) {
  const ops = new Map();
  for (const [route, methods] of Object.entries(spec.paths || {})) {
    for (const [method, operation] of Object.entries(methods || {})) {
      const lower = method.toLowerCase();
      if (!HTTP_METHODS.has(lower)) continue;
      const id = `${lower.toUpperCase()} ${route}`;
      ops.set(id, operation || {});
    }
  }
  return ops;
}
|
||||
|
||||
/**
 * Compare two parsed specs at operation granularity.
 * @param {object} oldSpec - Baseline spec.
 * @param {object} newSpec - Candidate spec.
 * @returns {{additive: string[], breaking: string[]}} Sorted operation ids
 * that were added (additive) or removed (breaking) relative to oldSpec.
 */
function diffSpecs(oldSpec, newSpec) {
  const before = enumerateOps(oldSpec);
  const after = enumerateOps(newSpec);

  const added = [...after.keys()].filter((id) => !before.has(id));
  const removed = [...before.keys()].filter((id) => !after.has(id));

  return { additive: added.sort(), breaking: removed.sort() };
}
|
||||
|
||||
/**
 * Render the diff as a Markdown changelog document with a generation
 * timestamp and one section per change category.
 * @param {{additive: string[], breaking: string[]}} diff
 * @returns {string} Markdown text.
 */
function renderMarkdown(diff) {
  // Each section is its heading, a bullet list (or "- None"), then a blank line.
  const section = (title, items) => {
    const body = items.length === 0 ? ['- None'] : items.map((op) => `- ${op}`);
    return [title, ...body, ''];
  };

  const header = ['# API Changelog', '', `Generated: ${new Date().toISOString()}`, ''];
  return [
    ...header,
    ...section('## Additive Operations', diff.additive),
    ...section('## Breaking Operations', diff.breaking),
  ].join('\n');
}
|
||||
|
||||
/**
 * Entry point: diff the baseline against the current spec and write
 * CHANGELOG.md. Skips quietly when no baseline snapshot exists yet.
 */
function main() {
  if (!fs.existsSync(BASELINE)) {
    console.log('[api:changelog] baseline missing; skipping');
    return;
  }
  const oldSpec = loadSpec(BASELINE);
  const newSpec = loadSpec(CURRENT);
  const markdown = renderMarkdown(diffSpecs(oldSpec, newSpec));
  fs.writeFileSync(OUTPUT, markdown, 'utf8');
  console.log(`[api:changelog] wrote changelog to ${OUTPUT}`);
}

main();
|
||||
104
scripts/api-compat-changelog.mjs
Normal file
104
scripts/api-compat-changelog.mjs
Normal file
@@ -0,0 +1,104 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Generate a Markdown changelog from two OpenAPI specs using the api-compat-diff tool.
|
||||
*
|
||||
* Usage:
|
||||
* node scripts/api-compat-changelog.mjs <oldSpec> <newSpec> [--title "Release X"] [--fail-on-breaking]
|
||||
*
|
||||
* Output is written to stdout.
|
||||
*/
|
||||
import { execFileSync } from 'child_process';
|
||||
import process from 'process';
|
||||
import path from 'path';
|
||||
|
||||
/**
 * Print a prefixed error message to stderr and abort with exit code 1.
 * @param {string} message - Human-readable failure description.
 */
function panic(message) {
  const prefixed = `[api-compat-changelog] ${message}`;
  console.error(prefixed);
  process.exit(1);
}
|
||||
|
||||
/**
 * Parse CLI arguments.
 * Positional: <oldSpec> <newSpec>. Flags: --title <text>, --fail-on-breaking.
 * Exits via panic() when the two positional args are missing.
 * @param {string[]} argv - Full process.argv vector.
 * @returns {{oldSpec: string, newSpec: string, title: string, failOnBreaking: boolean}}
 */
function parseArgs(argv) {
  const args = argv.slice(2);
  if (args.length < 2) {
    panic('Usage: node scripts/api-compat-changelog.mjs <oldSpec> <newSpec> [--title "Release X"] [--fail-on-breaking]');
  }

  const opts = {
    oldSpec: args[0],
    newSpec: args[1],
    title: 'API Compatibility Report',
    failOnBreaking: false,
  };

  let i = 2;
  while (i < args.length) {
    const flag = args[i];
    if (flag === '--title' && args[i + 1]) {
      opts.title = args[i + 1];
      i += 2; // consume the flag and its value
    } else if (flag === '--fail-on-breaking') {
      opts.failOnBreaking = true;
      i += 1;
    } else {
      // Unknown tokens are ignored, matching the original behavior.
      i += 1;
    }
  }

  return opts;
}
|
||||
|
||||
/**
 * Shell out to api-compat-diff.mjs in JSON mode and parse its stdout.
 * @param {string} oldSpec - Path to the baseline spec.
 * @param {string} newSpec - Path to the candidate spec.
 * @returns {object} Structured diff ({additive, breaking}).
 */
function runCompatDiff(oldSpec, newSpec) {
  const argv = ['scripts/api-compat-diff.mjs', oldSpec, newSpec, '--output', 'json'];
  const stdout = execFileSync('node', argv, { encoding: 'utf8' });
  return JSON.parse(stdout);
}
|
||||
|
||||
/**
 * Render a bulleted list, one item per line, or a single "<symbol> None"
 * line when the list is empty or absent.
 * @param {string[]|null|undefined} items
 * @param {string} symbol - Bullet prefix (e.g. '-').
 * @returns {string}
 */
function formatList(items, symbol) {
  if (!items || items.length === 0) {
    return `${symbol} None`;
  }
  const bullets = [];
  for (const item of items) {
    bullets.push(`${symbol} ${item}`);
  }
  return bullets.join('\n');
}
|
||||
|
||||
/**
 * Build the full Markdown report for a compat diff: header with the two
 * spec paths, a count summary, then Additive/Breaking sections.
 * @param {string} title - Top-level heading text.
 * @param {object} diff - Output of runCompatDiff ({additive, breaking}).
 * @param {string} oldSpec - Path to the baseline spec (shown in the header).
 * @param {string} newSpec - Path to the candidate spec (shown in the header).
 * @returns {string} Markdown document.
 */
function renderMarkdown(title, diff, oldSpec, newSpec) {
  const rel = (p) => path.relative(process.cwd(), p);
  const lines = [];
  lines.push(`# ${title}`, '');
  lines.push(`- Old spec: \`${rel(oldSpec)}\``);
  lines.push(`- New spec: \`${rel(newSpec)}\``);
  lines.push('', '## Summary');
  lines.push(`- Additive operations: ${diff.additive.operations.length}`);
  lines.push(`- Breaking operations: ${diff.breaking.operations.length}`);
  lines.push(`- Additive responses: ${diff.additive.responses.length}`);
  lines.push(`- Breaking responses: ${diff.breaking.responses.length}`);
  lines.push('', '## Additive', '### Operations');
  lines.push(formatList(diff.additive.operations, '-'));
  lines.push('', '### Responses');
  lines.push(formatList(diff.additive.responses, '-'));
  lines.push('', '## Breaking', '### Operations');
  lines.push(formatList(diff.breaking.operations, '-'));
  lines.push('', '### Responses');
  lines.push(formatList(diff.breaking.responses, '-'));
  lines.push('');
  return lines.join('\n');
}
|
||||
|
||||
/**
 * CLI entry point: print the Markdown report to stdout; exit 2 when
 * --fail-on-breaking is set and any breaking change exists.
 */
function main() {
  const opts = parseArgs(process.argv);
  const diff = runCompatDiff(opts.oldSpec, opts.newSpec);
  console.log(renderMarkdown(opts.title, diff, opts.oldSpec, opts.newSpec));

  const hasBreaking = diff.breaking.operations.length > 0 || diff.breaking.responses.length > 0;
  if (opts.failOnBreaking && hasBreaking) {
    process.exit(2);
  }
}

// Run only when invoked directly (not when imported as a module).
if (import.meta.url === `file://${process.argv[1]}`) {
  main();
}
|
||||
26
scripts/api-compat-changelog.test.mjs
Normal file
26
scripts/api-compat-changelog.test.mjs
Normal file
@@ -0,0 +1,26 @@
|
||||
// Smoke test for scripts/api-compat-changelog.mjs: runs the CLI against the
// checked-in fixtures and asserts on the rendered Markdown.
import assert from 'assert';
import { fileURLToPath } from 'url';
import path from 'path';
import { execFileSync } from 'child_process';

// Resolve the repository root relative to this test file (scripts/..).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const root = path.join(__dirname, '..');

// Fixture specs: new.yaml adds GET /bar and swaps /foo's 200 for 201.
const fixturesDir = path.join(root, 'scripts', '__fixtures__', 'api-compat');
const oldSpec = path.join(fixturesDir, 'old.yaml');
const newSpec = path.join(fixturesDir, 'new.yaml');

// Invoke the CLI exactly as a user would, capturing stdout.
const output = execFileSync('node', ['scripts/api-compat-changelog.mjs', oldSpec, newSpec, '--title', 'Test Report'], {
  cwd: root,
  encoding: 'utf8',
});

// The custom title is honored and the summary counts match the fixtures.
assert(output.includes('# Test Report'));
assert(output.includes('Additive operations: 1'));
assert(output.includes('Breaking operations: 0'));
// Operation-level and response-level entries appear in the body.
assert(output.includes('- get /bar'));
assert(output.includes('- get /foo -> 201'));
assert(output.includes('- get /foo -> 200'));

console.log('api-compat-changelog test passed');
|
||||
194
scripts/api-compat-diff.mjs
Normal file
194
scripts/api-compat-diff.mjs
Normal file
@@ -0,0 +1,194 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* API compatibility diff tool
|
||||
* Compares two OpenAPI 3.x specs (YAML or JSON) and reports additive vs breaking changes.
|
||||
*
|
||||
* Usage:
|
||||
* node scripts/api-compat-diff.mjs <oldSpec> <newSpec> [--output json|text] [--fail-on-breaking]
|
||||
*
|
||||
* Output (text):
|
||||
* - Added operations (additive)
|
||||
* - Removed operations (breaking)
|
||||
* - Added responses (additive)
|
||||
* - Removed responses (breaking)
|
||||
*
|
||||
* Output (json):
|
||||
* {
|
||||
* additive: { operations: [...], responses: [...] },
|
||||
* breaking: { operations: [...], responses: [...] }
|
||||
* }
|
||||
*
|
||||
* Exit codes:
|
||||
* 0 => success
|
||||
* 1 => invalid/missing args or IO/parsing error
|
||||
* 2 => breaking changes detected with --fail-on-breaking
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import process from 'process';
|
||||
import yaml from 'yaml';
|
||||
|
||||
/**
 * Print a prefixed error message to stderr and abort with exit code 1.
 * @param {string} message - Human-readable failure description.
 */
function panic(message) {
  const prefixed = `[api-compat-diff] ${message}`;
  console.error(prefixed);
  process.exit(1);
}
|
||||
|
||||
/**
 * Parse CLI arguments.
 * Positional: <oldSpec> <newSpec>. Flags: --output json|text (default text),
 * --fail-on-breaking. Exits via panic() on missing positionals or an
 * unsupported output mode.
 * @param {string[]} argv - Full process.argv vector.
 * @returns {{oldSpec: string, newSpec: string, output: string, failOnBreaking: boolean}}
 */
function parseArgs(argv) {
  const args = argv.slice(2);
  const opts = { output: 'text', failOnBreaking: false };

  if (args.length < 2) {
    panic('Usage: node scripts/api-compat-diff.mjs <oldSpec> <newSpec> [--output json|text] [--fail-on-breaking]');
  }

  opts.oldSpec = args[0];
  opts.newSpec = args[1];

  let i = 2;
  while (i < args.length) {
    const flag = args[i];
    if (flag === '--output' && args[i + 1]) {
      opts.output = args[i + 1].toLowerCase();
      i += 2; // consume the flag and its value
    } else if (flag === '--fail-on-breaking') {
      opts.failOnBreaking = true;
      i += 1;
    } else {
      // Unknown tokens are ignored, matching the original behavior.
      i += 1;
    }
  }

  const isKnownMode = opts.output === 'text' || opts.output === 'json';
  if (!isKnownMode) {
    panic(`Unsupported output mode: ${opts.output}`);
  }

  return opts;
}
|
||||
|
||||
/**
 * Load a spec from disk, parsing JSON by file extension and YAML otherwise.
 * Aborts via panic() on a missing file or a parse failure.
 * @param {string} specPath - Path to the spec document.
 * @returns {object} Parsed spec.
 */
function loadSpec(specPath) {
  if (!fs.existsSync(specPath)) {
    panic(`Spec not found: ${specPath}`);
  }

  const raw = fs.readFileSync(specPath, 'utf8');
  const isJson = path.extname(specPath).toLowerCase() === '.json';

  try {
    return isJson ? JSON.parse(raw) : yaml.parse(raw);
  } catch (err) {
    panic(`Failed to parse ${specPath}: ${err.message}`);
  }
}
|
||||
|
||||
// HTTP verbs recognized as operations; hoisted so the membership check is
// O(1) and the list is not rebuilt for every method of every path (the
// original allocated the array inside the inner loop).
const HTTP_VERBS = new Set(['get', 'put', 'post', 'delete', 'patch', 'head', 'options', 'trace']);

/**
 * Flatten spec.paths into a Map keyed by "method path" with the set of
 * declared response codes for each operation.
 * Non-verb path-item keys (e.g. `parameters`) and malformed path items are
 * skipped; a spec without `paths` yields an empty map.
 * @param {object} spec - Parsed OpenAPI document.
 * @returns {Map<string, {method: string, path: string, responses: Set<string>}>}
 */
function enumerateOperations(spec) {
  const ops = new Map();
  if (!spec?.paths || typeof spec.paths !== 'object') {
    return ops;
  }

  for (const [pathKey, pathItem] of Object.entries(spec.paths)) {
    if (!pathItem || typeof pathItem !== 'object') {
      continue;
    }
    for (const method of Object.keys(pathItem)) {
      const lowerMethod = method.toLowerCase();
      if (!HTTP_VERBS.has(lowerMethod)) {
        continue;
      }
      const opId = `${lowerMethod} ${pathKey}`;
      const responses = pathItem[method]?.responses ?? {};
      ops.set(opId, {
        method: lowerMethod,
        path: pathKey,
        responses: new Set(Object.keys(responses)),
      });
    }
  }

  return ops;
}
|
||||
|
||||
/**
 * Diff two operation maps (as produced by enumerateOperations).
 * Operation additions are additive; removals are breaking. For operations
 * present in both maps, added response codes are additive and removed ones
 * breaking. All result lists are sorted for deterministic output.
 * @param {Map<string, {responses: Set<string>}>} oldOps - Baseline operations.
 * @param {Map<string, {responses: Set<string>}>} newOps - Candidate operations.
 * @returns {{additive: {operations: string[], responses: string[]},
 *            breaking: {operations: string[], responses: string[]}}}
 */
function diffOperations(oldOps, newOps) {
  const additiveOps = [];
  const breakingOps = [];
  const additiveResponses = [];
  const breakingResponses = [];

  // Operations added or removed. Iterate keys only — the original
  // destructured `[id, op]` here and never used the value.
  for (const id of newOps.keys()) {
    if (!oldOps.has(id)) {
      additiveOps.push(id);
    }
  }
  for (const id of oldOps.keys()) {
    if (!newOps.has(id)) {
      breakingOps.push(id);
    }
  }

  // Response-level diffs for operations present on both sides.
  for (const [id, newOp] of newOps.entries()) {
    const oldOp = oldOps.get(id);
    if (!oldOp) continue;

    for (const code of newOp.responses) {
      if (!oldOp.responses.has(code)) {
        additiveResponses.push(`${id} -> ${code}`);
      }
    }
    for (const code of oldOp.responses) {
      if (!newOp.responses.has(code)) {
        breakingResponses.push(`${id} -> ${code}`);
      }
    }
  }

  return {
    additive: {
      operations: additiveOps.sort(),
      responses: additiveResponses.sort(),
    },
    breaking: {
      operations: breakingOps.sort(),
      responses: breakingResponses.sort(),
    },
  };
}
|
||||
|
||||
/**
 * Render a plain-text summary of the diff: per-category counts followed by
 * one indented line per changed operation/response ('+' additive, '-' breaking).
 * @param {{additive: object, breaking: object}} diff - Output of diffOperations.
 * @returns {string}
 */
function renderText(diff) {
  const section = (label, ops, resps, mark) => [
    `${label}:`,
    `  Operations: ${ops.length}`,
    ...ops.map((op) => `    ${mark} ${op}`),
    `  Responses: ${resps.length}`,
    ...resps.map((resp) => `    ${mark} ${resp}`),
  ];

  const lines = [
    ...section('Additive', diff.additive.operations, diff.additive.responses, '+'),
    ...section('Breaking', diff.breaking.operations, diff.breaking.responses, '-'),
  ];
  return lines.join('\n');
}
|
||||
|
||||
/**
 * CLI entry point: load both specs, diff them, print text or JSON, and exit
 * with code 2 when --fail-on-breaking is set and any breaking change exists.
 */
function main() {
  const opts = parseArgs(process.argv);
  const diff = diffOperations(
    enumerateOperations(loadSpec(opts.oldSpec)),
    enumerateOperations(loadSpec(opts.newSpec))
  );

  const rendered = opts.output === 'json' ? JSON.stringify(diff, null, 2) : renderText(diff);
  console.log(rendered);

  const hasBreaking = diff.breaking.operations.length > 0 || diff.breaking.responses.length > 0;
  if (opts.failOnBreaking && hasBreaking) {
    process.exit(2);
  }
}

// Run only when invoked directly (not when imported as a module).
if (import.meta.url === `file://${process.argv[1]}`) {
  main();
}
|
||||
25
scripts/api-compat-diff.test.mjs
Normal file
25
scripts/api-compat-diff.test.mjs
Normal file
@@ -0,0 +1,25 @@
|
||||
// Smoke test for scripts/api-compat-diff.mjs: runs the CLI in JSON mode
// against the checked-in fixtures and checks the structured diff.
import assert from 'assert';
import { fileURLToPath } from 'url';
import path from 'path';
import { execFileSync } from 'child_process';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Fixture specs: new.yaml adds GET /bar and swaps /foo's 200 for 201.
const fixturesDir = path.join(__dirname, '__fixtures__', 'api-compat');
const oldSpec = path.join(fixturesDir, 'old.yaml');
const newSpec = path.join(fixturesDir, 'new.yaml');

// Run the CLI from the repo root and capture its JSON stdout.
const output = execFileSync('node', ['scripts/api-compat-diff.mjs', oldSpec, newSpec, '--output', 'json'], {
  cwd: path.join(__dirname, '..'),
  encoding: 'utf8',
});

const diff = JSON.parse(output);

// The new operation is additive; nothing was removed.
assert.deepStrictEqual(diff.additive.operations, ['get /bar']);
assert.deepStrictEqual(diff.breaking.operations, []);
// /foo's response set changed: 201 added, 200 removed.
assert.deepStrictEqual(diff.additive.responses, ['get /foo -> 201']);
assert.deepStrictEqual(diff.breaking.responses, ['get /foo -> 200']);

console.log('api-compat-diff test passed');
|
||||
139
scripts/api-example-coverage.mjs
Normal file
139
scripts/api-example-coverage.mjs
Normal file
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env node
|
||||
// Verifies every OpenAPI operation has at least one request example and one response example.
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { parse } from 'yaml';
|
||||
|
||||
// ESM equivalents of CommonJS __filename/__dirname.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Repository root (this script lives in scripts/).
const ROOT = path.resolve(__dirname, '..');
// Directory scanned for OpenAPI YAML documents.
const OAS_ROOT = path.join(ROOT, 'src', 'Api', 'StellaOps.Api.OpenApi');
|
||||
|
||||
// Scan every OpenAPI YAML document under OAS_ROOT and fail (exit 1) when any
// operation lacks a request example (only required when it declares a
// requestBody) or a response example. Skips quietly when the directory or
// files are absent so the check is a no-op on repos without specs.
async function main() {
  if (!fs.existsSync(OAS_ROOT)) {
    console.log('[api:examples] no OpenAPI directory found; skipping');
    return;
  }

  const files = await findYamlFiles(OAS_ROOT);
  if (files.length === 0) {
    console.log('[api:examples] no OpenAPI files found; skipping');
    return;
  }

  // Collected violations: {file, path, method, reason}.
  const failures = [];

  for (const relative of files) {
    const fullPath = path.join(OAS_ROOT, relative);
    const content = fs.readFileSync(fullPath, 'utf8');
    let doc;
    try {
      doc = parse(content, { prettyErrors: true });
    } catch (err) {
      // Unparseable YAML is itself a failure for that file; keep scanning others.
      failures.push({ file: relative, path: '', method: '', reason: `YAML parse error: ${err.message}` });
      continue;
    }

    const paths = doc?.paths || {};
    for (const [route, methods] of Object.entries(paths)) {
      for (const [method, operation] of Object.entries(methods || {})) {
        // Skip non-verb path-item keys (parameters, summary, ...).
        if (!isHttpMethod(method)) continue;

        // Request example only required when a requestBody is declared.
        const hasRequestExample = operation?.requestBody ? hasExample(operation.requestBody) : true;
        // At least one response must carry an example; an operation with no
        // responses at all therefore counts as missing a response example.
        const hasResponseExample = Object.values(operation?.responses || {}).some(resp => hasExample(resp));

        if (!hasRequestExample || !hasResponseExample) {
          const missing = [];
          if (!hasRequestExample) missing.push('request');
          if (!hasResponseExample) missing.push('response');
          failures.push({ file: relative, path: route, method, reason: `missing ${missing.join(' & ')} example` });
        }
      }
    }
  }

  if (failures.length > 0) {
    console.error('[api:examples] found operations without examples:');
    for (const f of failures) {
      // "file route METHOD" with empty parts (e.g. parse errors) dropped.
      const locus = [f.file, f.path, f.method.toUpperCase()].filter(Boolean).join(' ');
      console.error(` - ${locus}: ${f.reason}`);
    }
    process.exit(1);
  }

  console.log('[api:examples] all operations contain request and response examples');
}
|
||||
|
||||
/**
 * Recursively collect all *.yaml files under root (depth-first, directory
 * entry order), returned as paths relative to root.
 * @param {string} root - Directory to scan.
 * @returns {Promise<string[]>}
 */
async function findYamlFiles(root) {
  const found = [];
  const visit = async (dir) => {
    const entries = await fs.promises.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const childPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        await visit(childPath);
        continue;
      }
      const isYaml = entry.isFile() && entry.name.toLowerCase().endsWith('.yaml');
      if (isYaml) {
        found.push(path.relative(root, childPath));
      }
    }
  };
  await visit(root);
  return found;
}
|
||||
|
||||
/**
 * True when the given path-item key is an HTTP verb (case-insensitive).
 * @param {string} method
 * @returns {boolean}
 */
function isHttpMethod(method) {
  const verbs = new Set(['get', 'post', 'put', 'patch', 'delete', 'options', 'head', 'trace']);
  return verbs.has(method.toLowerCase());
}
|
||||
|
||||
/**
 * Whether a request-body or response object carries at least one example,
 * either under content -> mediaType (inline example/examples or a schema
 * example) or via a top-level "examples" map on the node itself.
 * @param {object|null|undefined} node
 * @returns {boolean}
 */
function hasExample(node) {
  if (!node) return false;

  // Inspect every media type declared under content.
  for (const media of Object.values(node.content || {})) {
    if (!media) continue;
    const inline = media.example !== undefined
      || (media.examples && Object.keys(media.examples).length > 0);
    if (inline) return true;
    if (media.schema && hasSchemaExample(media.schema)) return true;
  }

  // response objects may have "examples" directly (non-standard but allowed by spectral rules)
  return Boolean(node.examples && Object.keys(node.examples).length > 0);
}
|
||||
|
||||
/**
 * Recursively determine whether a schema (or any composite/property/item
 * schema beneath it) declares an example.
 * @param {object|null|undefined} schema
 * @returns {boolean}
 */
function hasSchemaExample(schema) {
  if (!schema) return false;
  if (schema.example !== undefined) return true;
  if (Array.isArray(schema.examples) && schema.examples.length > 0) return true;

  // Recurse into allOf/oneOf/anyOf composites.
  for (const key of ['allOf', 'oneOf', 'anyOf']) {
    const parts = schema[key];
    if (Array.isArray(parts) && parts.some(hasSchemaExample)) return true;
  }

  // Object schemas: any property with an example counts.
  if (schema.type === 'object' && schema.properties) {
    if (Object.values(schema.properties).some(hasSchemaExample)) return true;
  }

  // Array schemas: delegate to the item schema.
  if (schema.type === 'array' && schema.items) {
    return hasSchemaExample(schema.items);
  }

  return false;
}
|
||||
|
||||
// Top-level invocation: any unexpected rejection is reported and mapped to a
// non-zero exit code so CI fails loudly.
main().catch(err => {
  console.error('[api:examples] fatal error', err);
  process.exit(1);
});
|
||||
115
scripts/provenance_backfill.py
Normal file
115
scripts/provenance_backfill.py
Normal file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Deterministic provenance backfill helper for Sprint 401.
|
||||
|
||||
Reads the attestation inventory NDJSON and subject→Rekor map, emits a sorted
|
||||
NDJSON log of resolved backfill actions. No network calls are performed.
|
||||
|
||||
Usage:
|
||||
python scripts/provenance_backfill.py \
|
||||
--inventory docs/provenance/attestation-inventory-2025-11-18.ndjson \
|
||||
--subject-map docs/provenance/subject-rekor-map-2025-11-18.json \
|
||||
--out logs/provenance-backfill-2025-11-18.ndjson
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Dict, Iterable, List, Optional
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class InventoryRecord:
    """One attestation inventory row: subject plus DSSE digest and optional Rekor entry."""

    subject: str
    dsse_hash: str
    rekor_entry: str

    @staticmethod
    def from_json(obj: dict) -> "InventoryRecord":
        """Build a record from a parsed NDJSON object ('rekorEntry' defaults to '')."""
        rekor = obj.get("rekorEntry", "")
        return InventoryRecord(
            subject=obj["subject"],
            dsse_hash=obj["dsseHash"],
            rekor_entry=rekor,
        )
|
||||
|
||||
|
||||
def load_inventory(path: Path) -> List[InventoryRecord]:
    """Parse an NDJSON inventory file into records, skipping blank lines."""
    parsed: List[InventoryRecord] = []
    with path.open("r", encoding="utf-8") as handle:
        for raw_line in handle:
            stripped = raw_line.strip()
            if stripped:
                parsed.append(InventoryRecord.from_json(json.loads(stripped)))
    return parsed
|
||||
|
||||
|
||||
def load_subject_map(path: Path) -> Dict[str, str]:
    """Load the subject -> Rekor entry JSON map from disk."""
    with path.open("r", encoding="utf-8") as handle:
        return json.load(handle)
|
||||
|
||||
|
||||
def validate_hash(prefix: str, value: str) -> None:
    """Ensure ``value`` is of the form ``sha256:<hex>``.

    The original check only required a non-empty suffix after ``sha256:``,
    while the error message promises ``sha256:<hex>``; non-hex digests are
    now rejected so the message and the check agree.

    Args:
        prefix: Field name used in the error message (e.g. "dsseHash").
        value: Candidate digest string.

    Raises:
        ValueError: If the ``sha256:`` prefix is missing, the digest is
            empty, or the digest contains non-hexadecimal characters.
    """
    digest = value[len("sha256:"):] if value.startswith("sha256:") else ""
    if not digest or any(c not in "0123456789abcdefABCDEF" for c in digest):
        raise ValueError(f"{prefix} must be sha256:<hex>: got '{value}'")
|
||||
|
||||
|
||||
def build_backfill_entries(
    inventory: Iterable[InventoryRecord],
    subject_map: Dict[str, str],
) -> List[dict]:
    """Resolve each inventory record against the subject map.

    A mapping entry wins over the record's own rekorEntry; records with no
    mapping are marked ``missing_rekor_entry`` (falling back to the record's
    value when present). Every hash is validated, and the output is sorted
    by (subject, rekorEntry) for deterministic logs.
    """
    resolved_entries: List[dict] = []
    for record in inventory:
        validate_hash("dsseHash", record.dsse_hash)
        mapped = subject_map.get(record.subject)
        rekor = mapped if mapped else record.rekor_entry
        if rekor:
            validate_hash("rekorEntry", rekor)
        resolved_entries.append(
            {
                "subject": record.subject,
                "dsseHash": record.dsse_hash,
                "rekorEntry": rekor,
                "status": "resolved" if mapped else "missing_rekor_entry",
            }
        )
    resolved_entries.sort(key=lambda item: (item["subject"], item["rekorEntry"] or ""))
    return resolved_entries
|
||||
|
||||
|
||||
def write_ndjson(path: Path, entries: Iterable[dict]) -> None:
    """Write entries as compact, key-sorted NDJSON, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    serialized = (
        json.dumps(entry, separators=(",", ":"), sort_keys=True)
        for entry in entries
    )
    with path.open("w", encoding="utf-8") as handle:
        for line in serialized:
            handle.write(line + "\n")
|
||||
|
||||
|
||||
def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace:
    """Build the CLI parser and parse ``argv`` (None -> sys.argv)."""
    parser = argparse.ArgumentParser(description="Deterministic provenance backfill helper.")
    # All three path options are required; declared data-driven to avoid repetition.
    required_paths = (
        ("--inventory", "Path to attestation inventory NDJSON."),
        ("--subject-map", "Path to subject→Rekor JSON map."),
        ("--out", "Output NDJSON log path."),
    )
    for flag, help_text in required_paths:
        parser.add_argument(flag, required=True, type=Path, help=help_text)
    return parser.parse_args(argv)
|
||||
|
||||
|
||||
def main(argv: Optional[List[str]] = None) -> int:
    """Run the backfill: load inputs, resolve entries, write the log, report counts."""
    args = parse_args(argv)
    entries = build_backfill_entries(
        load_inventory(args.inventory),
        load_subject_map(args.subject_map),
    )
    write_ndjson(args.out, entries)

    statuses = [entry["status"] for entry in entries]
    resolved = statuses.count("resolved")
    missing = len(statuses) - resolved
    print(f"wrote {len(entries)} entries -> {args.out} (resolved={resolved}, missing={missing})")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s status code to the shell.
    sys.exit(main())
|
||||
Reference in New Issue
Block a user