Refactor code structure for improved readability and maintainability; removed redundant code blocks and optimized function calls.
This commit is contained in:
10
src/Api/StellaOps.Api.OpenApi/CHANGELOG.md
Normal file
10
src/Api/StellaOps.Api.OpenApi/CHANGELOG.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# API Changelog
|
||||
|
||||
Generated: 2025-11-19T07:40:32.086Z
|
||||
|
||||
## Additive Operations
|
||||
- GET /export-center/bundles/{bundleId}/manifest
|
||||
- GET /graph/graphs/{graphId}/nodes
|
||||
|
||||
## Breaking Operations
|
||||
- None
|
||||
17
src/Api/StellaOps.Api.OpenApi/_shared/parameters/paging.yaml
Normal file
17
src/Api/StellaOps.Api.OpenApi/_shared/parameters/paging.yaml
Normal file
@@ -0,0 +1,17 @@
|
||||
parameters:
|
||||
LimitParam:
|
||||
name: limit
|
||||
in: query
|
||||
required: false
|
||||
schema:
|
||||
type: integer
|
||||
minimum: 1
|
||||
maximum: 200
|
||||
example: 50
|
||||
CursorParam:
|
||||
name: cursor
|
||||
in: query
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
example: eyJyIjoiMjAyNS0xMS0xOC0wMDIifQ
|
||||
@@ -0,0 +1,9 @@
|
||||
parameters:
|
||||
TenantParam:
|
||||
name: tenant
|
||||
in: query
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
description: Filter results to a specific tenant identifier.
|
||||
example: acme
|
||||
@@ -0,0 +1,13 @@
|
||||
responses:
|
||||
ErrorResponse:
|
||||
description: Error envelope
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '../schemas/common.yaml#/schemas/ErrorEnvelope'
|
||||
HealthResponse:
|
||||
description: Health envelope
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '../schemas/common.yaml#/schemas/HealthEnvelope'
|
||||
37
src/Api/StellaOps.Api.OpenApi/_shared/schemas/common.yaml
Normal file
37
src/Api/StellaOps.Api.OpenApi/_shared/schemas/common.yaml
Normal file
@@ -0,0 +1,37 @@
|
||||
schemas:
|
||||
ErrorEnvelope:
|
||||
type: object
|
||||
required: [code, message]
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
example: service_unavailable
|
||||
message:
|
||||
type: string
|
||||
traceId:
|
||||
type: string
|
||||
description: Correlation identifier for troubleshooting
|
||||
HealthEnvelope:
|
||||
type: object
|
||||
required: [status, service]
|
||||
properties:
|
||||
status:
|
||||
type: string
|
||||
example: ok
|
||||
service:
|
||||
type: string
|
||||
example: any-service
|
||||
PageMetadata:
|
||||
type: object
|
||||
required:
|
||||
- hasMore
|
||||
properties:
|
||||
hasMore:
|
||||
type: boolean
|
||||
description: Indicates if additional pages are available.
|
||||
nextCursor:
|
||||
type: string
|
||||
description: Cursor to fetch the next page.
|
||||
previousCursor:
|
||||
type: string
|
||||
description: Cursor to fetch the previous page.
|
||||
@@ -0,0 +1,12 @@
|
||||
securitySchemes:
|
||||
OAuthClientCredentials:
|
||||
type: oauth2
|
||||
description: OAuth 2.1 client credentials flow scoped per service.
|
||||
flows:
|
||||
clientCredentials:
|
||||
tokenUrl: /token
|
||||
scopes: {}
|
||||
BearerAuth:
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
1377
src/Api/StellaOps.Api.OpenApi/baselines/stella-baseline.yaml
Normal file
1377
src/Api/StellaOps.Api.OpenApi/baselines/stella-baseline.yaml
Normal file
File diff suppressed because it is too large
Load Diff
205
src/Api/StellaOps.Api.OpenApi/compose.mjs
Normal file
205
src/Api/StellaOps.Api.OpenApi/compose.mjs
Normal file
@@ -0,0 +1,205 @@
|
||||
#!/usr/bin/env node
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import yaml from 'yaml';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Composer layout: everything is resolved relative to this script's directory.
const ROOT = path.resolve(__dirname);
const OUTPUT = path.join(ROOT, 'stella.yaml');

// Shared component fragments live under `_shared/<kind>/*.yaml`.
const SHARED_COMPONENTS_DIR = path.join(ROOT, '_shared');
const SHARED_SCHEMAS_DIR = path.join(SHARED_COMPONENTS_DIR, 'schemas');
const SHARED_RESPONSES_DIR = path.join(SHARED_COMPONENTS_DIR, 'responses');
const SHARED_PARAMETERS_DIR = path.join(SHARED_COMPONENTS_DIR, 'parameters');
const SHARED_SECURITY_DIR = path.join(SHARED_COMPONENTS_DIR, 'securitySchemes');
||||
/**
 * Discover per-service OpenAPI specs.
 *
 * Every immediate subdirectory of ROOT (dotted directories excluded) that
 * contains an `openapi.yaml` is treated as one service. The returned list is
 * sorted by service name so composition is deterministic regardless of
 * readdir order.
 *
 * @returns {Array<{name: string, specPath: string, doc: object}>}
 */
function readServiceSpecs() {
  const services = fs
    .readdirSync(ROOT, { withFileTypes: true })
    .filter((entry) => entry.isDirectory() && !entry.name.startsWith('.'))
    .map((entry) => {
      const specPath = path.join(ROOT, entry.name, 'openapi.yaml');
      if (!fs.existsSync(specPath)) {
        return null;
      }
      const doc = yaml.parse(fs.readFileSync(specPath, 'utf8'), { prettyErrors: true });
      return { name: entry.name, specPath, doc };
    })
    .filter((service) => service !== null);

  services.sort((a, b) => a.name.localeCompare(b.name));
  return services;
}
|
||||
|
||||
/**
 * Compose all per-service specs plus the shared component fragments into a
 * single aggregate OpenAPI 3.1 document.
 *
 * Paths are namespaced under `/<service>` and annotated with `x-service` /
 * `x-original-path`; service-local schemas are namespaced `<service>.<Name>`.
 * Collisions (path+method or schema key) abort composition with an error.
 *
 * @param {Array<{name: string, doc: object}>} services - output of readServiceSpecs()
 * @returns {object} the aggregate OpenAPI document
 * @throws {Error} on path/method or schema collisions
 */
function mergeSpecs(services) {
  const aggregate = {
    openapi: '3.1.0',
    info: {
      title: 'StellaOps Aggregate API',
      version: '0.0.1',
      description: 'Composed OpenAPI from per-service specs. This file is generated by compose.mjs.',
    },
    servers: [],
    paths: {},
    components: { schemas: {}, parameters: {}, securitySchemes: {}, responses: {} },
  };

  // Fold in shared fragments (schemas, responses, parameters, security
  // schemes) first so services can reference them.
  mergeShared(aggregate, SHARED_SCHEMAS_DIR, 'schemas');
  mergeShared(aggregate, SHARED_RESPONSES_DIR, 'responses');
  mergeShared(aggregate, SHARED_PARAMETERS_DIR, 'parameters');
  mergeShared(aggregate, SHARED_SECURITY_DIR, 'securitySchemes');

  const httpMethods = ['get', 'put', 'post', 'delete', 'patch', 'head', 'options', 'trace'];

  for (const { name, doc } of services) {
    // Servers: keep every entry, tagged with its originating service.
    if (Array.isArray(doc.servers)) {
      for (const srv of doc.servers) {
        aggregate.servers.push({ ...srv, 'x-service': name });
      }
    }

    // Paths: namespace each route under the service name.
    for (const [originalPath, pathItem] of Object.entries(doc.paths || {})) {
      const namespacedPath = normalizePath(`/${name}${originalPath}`);
      aggregate.paths[namespacedPath] ??= {};
      for (const [method, op] of Object.entries(pathItem || {})) {
        const lower = method.toLowerCase();
        if (!httpMethods.includes(lower)) continue;
        if (aggregate.paths[namespacedPath][lower]) {
          throw new Error(`Path/method collision at ${namespacedPath} ${lower} (${originalPath}) between services`);
        }
        aggregate.paths[namespacedPath][lower] = {
          ...rewriteRefs(op, name),
          'x-service': name,
          'x-original-path': originalPath,
        };
      }
    }

    // Schemas: namespace as `<service>.<SchemaName>` to avoid collisions.
    for (const [schemaName, schemaDef] of Object.entries(doc.components?.schemas || {})) {
      const key = `${name}.${schemaName}`;
      if (aggregate.components.schemas[key]) {
        throw new Error(`Schema collision for ${key}`);
      }
      aggregate.components.schemas[key] = rewriteRefs(schemaDef, name);
    }
  }

  // Drop duplicate server entries while preserving first-seen order.
  const seenServers = new Set();
  aggregate.servers = aggregate.servers.filter((srv) => {
    const fingerprint = JSON.stringify(srv);
    if (seenServers.has(fingerprint)) return false;
    seenServers.add(fingerprint);
    return true;
  });

  // Stable key order keeps the generated file diff-friendly.
  aggregate.paths = sortObject(aggregate.paths, sortPathItem);
  aggregate.components.schemas = sortObject(aggregate.components.schemas);
  aggregate.components.responses = sortObject(aggregate.components.responses);
  aggregate.components.parameters = sortObject(aggregate.components.parameters);
  aggregate.components.securitySchemes = sortObject(aggregate.components.securitySchemes);
  return aggregate;
}
|
||||
|
||||
/**
 * Ensure a path has exactly one leading slash and no duplicate slashes.
 *
 * Bug fix: the original returned early for non-slash-prefixed input and
 * skipped duplicate-slash collapsing, so `a//b` came back as `/a//b`.
 * Namespacing (`/${service}${path}`) relies on this collapsing `//`.
 *
 * @param {string} pathValue - raw (possibly unprefixed) route path
 * @returns {string} normalized path, e.g. `/graph//healthz` -> `/graph/healthz`
 */
function normalizePath(pathValue) {
  const prefixed = pathValue.startsWith('/') ? pathValue : `/${pathValue}`;
  return prefixed.replace(/\/{2,}/g, '/');
}
|
||||
|
||||
/**
 * Return a copy of `obj` with keys in sorted order, optionally transforming
 * each value. Does not mutate the input.
 *
 * @param {object} obj - source object
 * @param {(value: any) => any} [valueTransform] - applied to each value (identity by default)
 * @returns {object} new object with sorted keys
 */
function sortObject(obj, valueTransform = (v) => v) {
  return Object.fromEntries(
    Object.keys(obj)
      .sort()
      .map((key) => [key, valueTransform(obj[key])]),
  );
}
|
||||
|
||||
/**
 * Return a copy of a path item with its keys (HTTP verbs and any vendor
 * extensions) in sorted order. Does not mutate the input.
 *
 * @param {object} pathItem - an OpenAPI path item object
 * @returns {object} new object with sorted keys
 */
function sortPathItem(pathItem) {
  // Comparator matches default string sort (UTF-16 code-unit order).
  const byKey = ([a], [b]) => (a < b ? -1 : a > b ? 1 : 0);
  return Object.fromEntries(Object.entries(pathItem).sort(byKey));
}
|
||||
|
||||
/**
 * Serialize the aggregate document to OUTPUT as YAML.
 *
 * `sortMapEntries` keeps the serialized key order deterministic so repeated
 * runs produce byte-identical files.
 *
 * @param {object} doc - aggregate OpenAPI document
 */
function writeAggregate(doc) {
  const serialized = yaml.stringify(doc, { sortMapEntries: true });
  fs.writeFileSync(OUTPUT, serialized, 'utf8');
  console.log(`[stella-compose] wrote aggregate spec to ${OUTPUT}`);
}
|
||||
|
||||
/**
 * Recursively clone `node`, rewriting every `$ref` string via normalizeRef
 * so it resolves inside the aggregate document. Primitives are returned
 * as-is; arrays and objects are deep-copied (the input is never mutated).
 *
 * Cleanup: the original computed `Array.isArray(node) ? [] : {}` for the
 * clone, but arrays are already handled (and returned) above that point, so
 * the ternary was dead code — a plain object literal suffices.
 *
 * @param {*} node - any YAML-parsed value
 * @param {string} serviceName - owning service, used to namespace local refs
 * @returns {*} rewritten deep copy of `node`
 */
function rewriteRefs(node, serviceName) {
  if (node === null || node === undefined) return node;

  if (Array.isArray(node)) {
    return node.map((item) => rewriteRefs(item, serviceName));
  }

  if (typeof node !== 'object') {
    return node;
  }

  const clone = {};
  for (const [key, value] of Object.entries(node)) {
    clone[key] =
      key === '$ref' && typeof value === 'string'
        ? normalizeRef(value, serviceName)
        : rewriteRefs(value, serviceName);
  }
  return clone;
}
|
||||
|
||||
/**
 * Rewrite a `$ref` so it resolves inside the composed aggregate document.
 *
 * Two cases:
 *  1. Shared fragment refs — `../_shared/<kind>/<file>.yaml#/<kind>/<Name>`
 *     — become `#/components/<kind>/<Name>`, matching where mergeShared puts
 *     them. Bug fix: the original only handled `../_shared/schemas/` refs, so
 *     shared parameter/response/security refs (used throughout the service
 *     specs, e.g. `../_shared/parameters/paging.yaml#/parameters/LimitParam`)
 *     were left file-relative and dangled in the aggregate. It also indexed
 *     `split('#/schemas/')[1]` without a guard, producing
 *     `#/components/schemas/undefined` for a malformed fragment.
 *  2. Service-local schema refs — `#/components/schemas/<Name>` — get
 *     namespaced as `<service>.<Name>` (matching mergeSpecs), unless the name
 *     already contains a dot (already namespaced).
 *
 * Anything else is returned unchanged.
 *
 * @param {string} refValue - original `$ref` string
 * @param {string} serviceName - owning service
 * @returns {string} rewritten (or unchanged) `$ref`
 */
function normalizeRef(refValue, serviceName) {
  const shared = refValue.match(
    /^\.\.\/_shared\/(schemas|parameters|responses|securitySchemes)\/[^#]*#\/\1\/(.+)$/,
  );
  if (shared) {
    const [, kind, componentName] = shared;
    return `#/components/${kind}/${componentName}`;
  }

  const prefix = '#/components/schemas/';
  if (refValue.startsWith(prefix)) {
    const name = refValue.slice(prefix.length);
    if (name.includes('.')) {
      return refValue; // already namespaced
    }
    return `${prefix}${serviceName}.${name}`;
  }

  return refValue;
}
|
||||
|
||||
/**
 * Merge shared component fragments from `dir` into `aggregate.components[key]`.
 *
 * Each `*.yaml`/`*.yml` file in `dir` is parsed and its top-level `key`
 * mapping (e.g. `schemas:`) is copied in; files are processed in sorted order
 * for determinism. Missing directories are skipped silently.
 *
 * @param {object} aggregate - aggregate document being built
 * @param {string} dir - fragment directory (may not exist)
 * @param {string} key - component kind: schemas|responses|parameters|securitySchemes
 * @throws {Error} on a duplicate component name within the same kind
 */
function mergeShared(aggregate, dir, key) {
  if (!dir || !fs.existsSync(dir)) {
    return;
  }

  const yamlFiles = fs
    .readdirSync(dir)
    .filter((file) => file.endsWith('.yaml') || file.endsWith('.yml'))
    .sort();

  for (const file of yamlFiles) {
    const content = fs.readFileSync(path.join(dir, file), 'utf8');
    const doc = yaml.parse(content, { prettyErrors: true });
    for (const [name, value] of Object.entries(doc?.[key] || {})) {
      if (aggregate.components[key][name]) {
        throw new Error(`Shared ${key} collision for ${name}`);
      }
      aggregate.components[key][name] = value;
    }
  }
}
|
||||
|
||||
/**
 * Entry point: discover service specs, compose them, write the aggregate.
 * An empty service list is not an error — an empty aggregate is still written.
 */
function main() {
  const services = readServiceSpecs();
  if (services.length === 0) {
    console.log('[stella-compose] no service specs found; writing empty aggregate');
  }
  writeAggregate(mergeSpecs(services));
}

main();
|
||||
237
src/Api/StellaOps.Api.OpenApi/export-center/openapi.yaml
Normal file
237
src/Api/StellaOps.Api.OpenApi/export-center/openapi.yaml
Normal file
@@ -0,0 +1,237 @@
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: StellaOps Export Center API (stub)
|
||||
version: 0.0.1
|
||||
description: Health and metadata scaffold for Export Center; replace with real contracts
|
||||
as authored.
|
||||
servers:
|
||||
- url: https://export.stellaops.local
|
||||
description: Example Export Center endpoint
|
||||
paths:
|
||||
/health:
|
||||
get:
|
||||
tags:
|
||||
- Health
|
||||
summary: Liveness probe
|
||||
responses:
|
||||
'200':
|
||||
description: Service is up
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
ok:
|
||||
value:
|
||||
status: ok
|
||||
service: export-center
|
||||
timestamp: '2025-11-18T00:00:00Z'
|
||||
'503':
|
||||
description: Service unhealthy or dependencies unavailable.
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
unhealthy:
|
||||
value:
|
||||
status: degraded
|
||||
service: export-center
|
||||
reason: object store unreachable
|
||||
timestamp: '2025-11-18T00:00:00Z'
|
||||
/healthz:
|
||||
get:
|
||||
summary: Service health
|
||||
tags:
|
||||
- Meta
|
||||
responses:
|
||||
'200':
|
||||
description: Service healthy
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/HealthResponse'
|
||||
examples:
|
||||
ok:
|
||||
summary: Healthy response
|
||||
value:
|
||||
status: ok
|
||||
service: export-center
|
||||
'503':
|
||||
description: Service unavailable
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
examples:
|
||||
unavailable:
|
||||
summary: Unhealthy response
|
||||
value:
|
||||
code: service_unavailable
|
||||
message: mirror bundle backlog exceeds SLA
|
||||
traceId: '3'
|
||||
/bundles/{bundleId}:
|
||||
get:
|
||||
tags:
|
||||
- Bundles
|
||||
summary: Download export bundle by id
|
||||
parameters:
|
||||
- name: bundleId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
example: bundle-2025-11-18-001
|
||||
responses:
|
||||
'200':
|
||||
description: Bundle stream
|
||||
content:
|
||||
application/zip:
|
||||
examples:
|
||||
download:
|
||||
summary: Zip payload
|
||||
value: binary data
|
||||
'404':
|
||||
description: Bundle not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
examples:
|
||||
notFound:
|
||||
summary: Bundle missing
|
||||
value:
|
||||
code: export.bundle_not_found
|
||||
message: Bundle bundle-2025-11-18-001 not found.
|
||||
traceId: 01JF04NF
|
||||
/bundles:
|
||||
get:
|
||||
tags:
|
||||
- Bundles
|
||||
summary: List export bundles
|
||||
parameters:
|
||||
- $ref: '../_shared/parameters/tenant.yaml#/parameters/TenantParam'
|
||||
- $ref: '../_shared/parameters/paging.yaml#/parameters/LimitParam'
|
||||
- $ref: '../_shared/parameters/paging.yaml#/parameters/CursorParam'
|
||||
responses:
|
||||
'200':
|
||||
description: Bundle page
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
items:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/BundleSummary'
|
||||
metadata:
|
||||
$ref: '../_shared/schemas/common.yaml#/schemas/PageMetadata'
|
||||
examples:
|
||||
page:
|
||||
summary: First page of bundles
|
||||
value:
|
||||
items:
|
||||
- bundleId: bundle-2025-11-18-001
|
||||
createdAt: '2025-11-18T12:00:00Z'
|
||||
status: ready
|
||||
sizeBytes: 1048576
|
||||
- bundleId: bundle-2025-11-18-000
|
||||
createdAt: '2025-11-18T10:00:00Z'
|
||||
status: ready
|
||||
sizeBytes: 2048
|
||||
metadata:
|
||||
hasMore: true
|
||||
nextCursor: eyJyIjoiMjAyNS0xMS0xOC0wMDIifQ
|
||||
'400':
|
||||
description: Invalid request
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '../_shared/schemas/common.yaml#/schemas/ErrorEnvelope'
|
||||
examples:
|
||||
invalidTenant:
|
||||
summary: Tenant missing
|
||||
value:
|
||||
code: export.invalid_tenant
|
||||
message: tenant query parameter is required.
|
||||
traceId: 01JF04ERR3
|
||||
/bundles/{bundleId}/manifest:
|
||||
get:
|
||||
tags:
|
||||
- Bundles
|
||||
summary: Fetch bundle manifest metadata
|
||||
parameters:
|
||||
- name: bundleId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: Manifest metadata
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/BundleManifest'
|
||||
examples:
|
||||
manifest:
|
||||
value:
|
||||
bundleId: bundle-2025-11-18-001
|
||||
contents:
|
||||
- type: advisory
|
||||
digest: sha256:abc123
|
||||
- type: vex
|
||||
digest: sha256:def456
|
||||
createdAt: '2025-11-18T12:00:00Z'
|
||||
'404':
|
||||
description: Bundle not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '../_shared/schemas/common.yaml#/schemas/ErrorEnvelope'
|
||||
components:
|
||||
schemas:
|
||||
BundleSummary:
|
||||
type: object
|
||||
required:
|
||||
- bundleId
|
||||
- createdAt
|
||||
- status
|
||||
properties:
|
||||
bundleId:
|
||||
type: string
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
status:
|
||||
type: string
|
||||
enum:
|
||||
- ready
|
||||
- building
|
||||
- failed
|
||||
sizeBytes:
|
||||
type: integer
|
||||
BundleManifest:
|
||||
type: object
|
||||
required:
|
||||
- bundleId
|
||||
- contents
|
||||
properties:
|
||||
bundleId:
|
||||
type: string
|
||||
contents:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
required: [type, digest]
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
example: advisory
|
||||
digest:
|
||||
type: string
|
||||
example: sha256:abc123
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
HealthResponse:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/HealthEnvelope
|
||||
ErrorEnvelope:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
153
src/Api/StellaOps.Api.OpenApi/graph/openapi.yaml
Normal file
153
src/Api/StellaOps.Api.OpenApi/graph/openapi.yaml
Normal file
@@ -0,0 +1,153 @@
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: StellaOps Graph API (stub)
|
||||
version: 0.0.1
|
||||
description: Health and dataset status scaffold for Graph service; replace with
|
||||
full contract as authored.
|
||||
servers:
|
||||
- url: https://graph.stellaops.local
|
||||
description: Example Graph endpoint
|
||||
paths:
|
||||
/healthz:
|
||||
get:
|
||||
summary: Service health
|
||||
tags:
|
||||
- Meta
|
||||
responses:
|
||||
'200':
|
||||
description: Service healthy
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/HealthEnvelope
|
||||
examples:
|
||||
ok:
|
||||
summary: Healthy response
|
||||
value:
|
||||
status: ok
|
||||
service: graph
|
||||
'503':
|
||||
description: Service unavailable
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
examples:
|
||||
unavailable:
|
||||
summary: Unhealthy response
|
||||
value:
|
||||
code: service_unavailable
|
||||
message: indexer lag exceeds threshold
|
||||
traceId: '5'
|
||||
/graphs/{graphId}/status:
|
||||
get:
|
||||
summary: Get graph build status
|
||||
tags:
|
||||
- Graphs
|
||||
parameters:
|
||||
- name: graphId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- $ref: ../_shared/parameters/tenant.yaml#/parameters/TenantParam
|
||||
responses:
|
||||
'200':
|
||||
description: Graph status
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/GraphStatus'
|
||||
examples:
|
||||
ready:
|
||||
value:
|
||||
graphId: graph-01JF0XYZ
|
||||
status: ready
|
||||
builtAt: '2025-11-18T12:00:00Z'
|
||||
'404':
|
||||
description: Graph not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
/graphs/{graphId}/nodes:
|
||||
get:
|
||||
summary: List graph nodes
|
||||
tags:
|
||||
- Graphs
|
||||
parameters:
|
||||
- name: graphId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- $ref: ../_shared/parameters/paging.yaml#/parameters/LimitParam
|
||||
- $ref: ../_shared/parameters/paging.yaml#/parameters/CursorParam
|
||||
responses:
|
||||
'200':
|
||||
description: Graph nodes page
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/GraphNodePage'
|
||||
examples:
|
||||
sample:
|
||||
value:
|
||||
nodes:
|
||||
- id: node-1
|
||||
kind: artifact
|
||||
label: registry.stella-ops.local/runtime/api
|
||||
- id: node-2
|
||||
kind: policy
|
||||
label: policy:baseline
|
||||
metadata:
|
||||
hasMore: true
|
||||
nextCursor: eyJuIjoiMjAyNS0xMS0xOCJ9
|
||||
'404':
|
||||
description: Graph not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
components:
|
||||
schemas:
|
||||
GraphStatus:
|
||||
type: object
|
||||
required:
|
||||
- graphId
|
||||
- status
|
||||
properties:
|
||||
graphId:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
enum:
|
||||
- building
|
||||
- ready
|
||||
- failed
|
||||
builtAt:
|
||||
type: string
|
||||
format: date-time
|
||||
GraphNodePage:
|
||||
type: object
|
||||
required:
|
||||
- nodes
|
||||
- metadata
|
||||
properties:
|
||||
nodes:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
required:
|
||||
- id
|
||||
- kind
|
||||
- label
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
kind:
|
||||
type: string
|
||||
label:
|
||||
type: string
|
||||
metadata:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/PageMetadata
|
||||
220
src/Api/StellaOps.Api.OpenApi/orchestrator/openapi.yaml
Normal file
220
src/Api/StellaOps.Api.OpenApi/orchestrator/openapi.yaml
Normal file
@@ -0,0 +1,220 @@
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: StellaOps Orchestrator API (stub)
|
||||
version: 0.0.1
|
||||
description: Health and job orchestration scaffold for Orchestrator service; replace
|
||||
with real contracts as contracts are authored.
|
||||
servers:
|
||||
- url: https://orchestrator.stellaops.local
|
||||
description: Example Orchestrator endpoint
|
||||
paths:
|
||||
/health:
|
||||
get:
|
||||
tags:
|
||||
- Health
|
||||
summary: Liveness probe
|
||||
responses:
|
||||
'200':
|
||||
description: Service is up
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
ok:
|
||||
value:
|
||||
status: ok
|
||||
service: orchestrator
|
||||
timestamp: '2025-11-18T00:00:00Z'
|
||||
'503':
|
||||
description: Service unhealthy or dependencies unavailable.
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
unhealthy:
|
||||
value:
|
||||
status: degraded
|
||||
service: orchestrator
|
||||
reason: scheduler queue unreachable
|
||||
timestamp: '2025-11-18T00:00:00Z'
|
||||
/healthz:
|
||||
get:
|
||||
summary: Service health
|
||||
tags:
|
||||
- Meta
|
||||
responses:
|
||||
'200':
|
||||
description: Service healthy
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/HealthEnvelope
|
||||
examples:
|
||||
ok:
|
||||
summary: Healthy response
|
||||
value:
|
||||
status: ok
|
||||
service: orchestrator
|
||||
'503':
|
||||
description: Service unavailable
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
examples:
|
||||
unavailable:
|
||||
summary: Unhealthy response
|
||||
value:
|
||||
code: service_unavailable
|
||||
message: outbound queue lag exceeds threshold
|
||||
traceId: '1'
|
||||
/jobs:
|
||||
post:
|
||||
tags:
|
||||
- Jobs
|
||||
summary: Submit a job to the orchestrator queue
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/JobCreateRequest'
|
||||
examples:
|
||||
scanJob:
|
||||
summary: Submit scan job
|
||||
value:
|
||||
kind: scan
|
||||
payload:
|
||||
artifactId: registry.stella-ops.local/runtime/api
|
||||
policyVersion: 2025.10.1
|
||||
priority: high
|
||||
tenant: tenant-alpha
|
||||
security:
|
||||
- OAuthClientCredentials: []
|
||||
- BearerAuth: []
|
||||
responses:
|
||||
'202':
|
||||
description: Job accepted
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/JobCreateResponse'
|
||||
examples:
|
||||
accepted:
|
||||
summary: Job enqueued
|
||||
value:
|
||||
jobId: job_01JF04ABCD
|
||||
status: queued
|
||||
queue: scan
|
||||
enqueuedAt: '2025-11-18T12:00:00Z'
|
||||
'400':
|
||||
description: Invalid request
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
examples:
|
||||
missingType:
|
||||
summary: Missing jobType
|
||||
value:
|
||||
code: orch.invalid_request
|
||||
message: jobType is required.
|
||||
traceId: 01JF04ERR1
|
||||
get:
|
||||
tags:
|
||||
- Jobs
|
||||
summary: List jobs
|
||||
parameters:
|
||||
- in: query
|
||||
name: status
|
||||
schema:
|
||||
type: string
|
||||
enum:
|
||||
- queued
|
||||
- running
|
||||
- failed
|
||||
- completed
|
||||
- $ref: ../_shared/parameters/paging.yaml#/parameters/LimitParam
|
||||
- $ref: ../_shared/parameters/tenant.yaml#/parameters/TenantParam
|
||||
responses:
|
||||
'200':
|
||||
description: Jobs page
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/JobSummary'
|
||||
examples:
|
||||
sample:
|
||||
value:
|
||||
- jobId: job_01JF04ABCD
|
||||
status: queued
|
||||
queue: scan
|
||||
enqueuedAt: '2025-11-18T12:00:00Z'
|
||||
- jobId: job_01JF04EFGH
|
||||
status: running
|
||||
queue: policy-eval
|
||||
enqueuedAt: '2025-11-18T11:55:00Z'
|
||||
startedAt: '2025-11-18T11:56:10Z'
|
||||
/jobs/{jobId}:
|
||||
get:
|
||||
tags:
|
||||
- Jobs
|
||||
summary: Get job status
|
||||
parameters:
|
||||
- name: jobId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: Job status
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/JobSummary'
|
||||
examples:
|
||||
sample:
|
||||
value:
|
||||
jobId: job_01JF04ABCD
|
||||
status: queued
|
||||
queue: scan
|
||||
enqueuedAt: '2025-11-18T12:00:00Z'
|
||||
'404':
|
||||
description: Job not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
components:
|
||||
schemas:
|
||||
JobSummary:
|
||||
type: object
|
||||
required:
|
||||
- jobId
|
||||
- status
|
||||
- queue
|
||||
- enqueuedAt
|
||||
properties:
|
||||
jobId:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
enum:
|
||||
- queued
|
||||
- running
|
||||
- failed
|
||||
- completed
|
||||
queue:
|
||||
type: string
|
||||
enqueuedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
startedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
completedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
tenant:
|
||||
type: string
|
||||
162
src/Api/StellaOps.Api.OpenApi/policy/openapi.yaml
Normal file
162
src/Api/StellaOps.Api.OpenApi/policy/openapi.yaml
Normal file
@@ -0,0 +1,162 @@
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: StellaOps Policy Engine API (stub)
|
||||
version: 0.0.1
|
||||
description: Health + evaluation scaffold for Policy Engine; replace with real contracts
|
||||
as authored.
|
||||
servers:
|
||||
- url: https://policy.stellaops.local
|
||||
description: Example Policy Engine endpoint
|
||||
paths:
|
||||
/health:
|
||||
get:
|
||||
tags:
|
||||
- Health
|
||||
summary: Liveness probe
|
||||
responses:
|
||||
'200':
|
||||
description: Service is up
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
ok:
|
||||
value:
|
||||
status: ok
|
||||
service: policy
|
||||
timestamp: '2025-11-18T00:00:00Z'
|
||||
'503':
|
||||
description: Service unhealthy or dependencies unavailable.
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
unhealthy:
|
||||
value:
|
||||
status: degraded
|
||||
service: policy
|
||||
reason: mongo unavailable
|
||||
timestamp: '2025-11-18T00:00:00Z'
|
||||
/healthz:
|
||||
get:
|
||||
summary: Service health
|
||||
tags:
|
||||
- Meta
|
||||
responses:
|
||||
'200':
|
||||
description: Service healthy
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/HealthEnvelope
|
||||
examples:
|
||||
ok:
|
||||
summary: Healthy response
|
||||
value:
|
||||
status: ok
|
||||
service: policy
|
||||
'503':
|
||||
description: Service unavailable
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
examples:
|
||||
unavailable:
|
||||
summary: Unhealthy response
|
||||
value:
|
||||
code: service_unavailable
|
||||
message: projector backlog exceeds SLA
|
||||
traceId: '2'
|
||||
/evaluate:
|
||||
post:
|
||||
tags:
|
||||
- Evaluation
|
||||
summary: Evaluate policy for an artifact
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/EvaluationRequest'
|
||||
examples:
|
||||
default:
|
||||
summary: Evaluate current policy for an artifact
|
||||
value:
|
||||
artifactId: registry.stella-ops.local/runtime/api
|
||||
policyVersion: 2025.10.1
|
||||
inputs:
|
||||
tenant: acme
|
||||
branch: main
|
||||
responses:
|
||||
'200':
|
||||
description: Evaluation succeeded
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
allow:
|
||||
summary: Allow decision with reasons
|
||||
value:
|
||||
decision: allow
|
||||
policyVersion: 2025.10.1
|
||||
traceId: 01JF040XYZ
|
||||
reasons:
|
||||
- signed
|
||||
- within SLO
|
||||
metadata:
|
||||
latencyMs: 42
|
||||
obligations:
|
||||
- record: evidence
|
||||
schema:
|
||||
$ref: '#/components/schemas/EvaluationResponse'
|
||||
'400':
|
||||
description: Invalid request
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: ../_shared/schemas/common.yaml#/schemas/ErrorEnvelope
|
||||
examples:
|
||||
missingArtifact:
|
||||
summary: Missing artifactId
|
||||
value:
|
||||
code: policy.invalid_request
|
||||
message: artifactId is required.
|
||||
traceId: 01JF041ERR
|
||||
security:
|
||||
- OAuthClientCredentials: []
|
||||
- BearerAuth: []
|
||||
components:
|
||||
schemas:
|
||||
EvaluationRequest:
|
||||
type: object
|
||||
required:
|
||||
- artifactId
|
||||
properties:
|
||||
artifactId:
|
||||
type: string
|
||||
example: registry.stella-ops.local/runtime/api
|
||||
policyVersion:
|
||||
type: string
|
||||
example: 2025.10.1
|
||||
inputs:
|
||||
type: object
|
||||
EvaluationResponse:
|
||||
type: object
|
||||
required:
|
||||
- decision
|
||||
properties:
|
||||
decision:
|
||||
type: string
|
||||
enum:
|
||||
- allow
|
||||
- deny
|
||||
policyVersion:
|
||||
type: string
|
||||
traceId:
|
||||
type: string
|
||||
reasons:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
obligations:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
129
src/Api/StellaOps.Api.OpenApi/scheduler/openapi.yaml
Normal file
129
src/Api/StellaOps.Api.OpenApi/scheduler/openapi.yaml
Normal file
@@ -0,0 +1,129 @@
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: StellaOps Scheduler API (stub)
|
||||
version: 0.0.1
|
||||
description: Health and queue status scaffold for Scheduler service; replace with full contract as authored.
|
||||
servers:
|
||||
- url: https://scheduler.stellaops.local
|
||||
description: Example Scheduler endpoint
|
||||
paths:
|
||||
/health:
|
||||
get:
|
||||
tags:
|
||||
- Health
|
||||
summary: Liveness probe
|
||||
responses:
|
||||
'200':
|
||||
description: Service is up
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
ok:
|
||||
value:
|
||||
status: ok
|
||||
service: scheduler
|
||||
timestamp: '2025-11-18T00:00:00Z'
|
||||
'503':
|
||||
description: Service unhealthy or dependencies unavailable.
|
||||
content:
|
||||
application/json:
|
||||
examples:
|
||||
unhealthy:
|
||||
value:
|
||||
status: degraded
|
||||
service: scheduler
|
||||
reason: queue not reachable
|
||||
timestamp: '2025-11-18T00:00:00Z'
|
||||
/healthz:
|
||||
get:
|
||||
summary: Service health
|
||||
tags:
|
||||
- Meta
|
||||
responses:
|
||||
'200':
|
||||
description: Service healthy
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/HealthEnvelope'
|
||||
examples:
|
||||
ok:
|
||||
summary: Healthy response
|
||||
value:
|
||||
status: ok
|
||||
service: scheduler
|
||||
'503':
|
||||
description: Service unavailable
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
examples:
|
||||
unavailable:
|
||||
summary: Unhealthy response
|
||||
value:
|
||||
code: service_unavailable
|
||||
message: queue backlog exceeds threshold
|
||||
traceId: 4
|
||||
/queues/{name}:
|
||||
get:
|
||||
tags:
|
||||
- Queues
|
||||
summary: Get queue status
|
||||
parameters:
|
||||
- name: name
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
example: default
|
||||
responses:
|
||||
'200':
|
||||
description: Queue status
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/QueueStatus'
|
||||
examples:
|
||||
status:
|
||||
summary: Queue depth snapshot
|
||||
value:
|
||||
name: default
|
||||
depth: 12
|
||||
inflight: 2
|
||||
oldestAgeSeconds: 45
|
||||
updatedAt: '2025-11-18T12:00:00Z'
|
||||
'404':
|
||||
description: Queue not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
examples:
|
||||
notFound:
|
||||
summary: Queue missing
|
||||
value:
|
||||
code: scheduler.queue_not_found
|
||||
message: Queue default not found.
|
||||
traceId: 01JF04NF2
|
||||
components:
|
||||
schemas:
|
||||
QueueStatus:
|
||||
type: object
|
||||
required:
|
||||
- name
|
||||
- depth
|
||||
- inflight
|
||||
- updatedAt
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
depth:
|
||||
type: integer
|
||||
inflight:
|
||||
type: integer
|
||||
oldestAgeSeconds:
|
||||
type: integer
|
||||
updatedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
1542
src/Api/StellaOps.Api.OpenApi/stella.yaml
Normal file
1542
src/Api/StellaOps.Api.OpenApi/stella.yaml
Normal file
File diff suppressed because it is too large
Load Diff
10
src/Api/StellaOps.Api.OpenApi/tasks.md
Normal file
10
src/Api/StellaOps.Api.OpenApi/tasks.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# API Contracts Tasks
|
||||
|
||||
| Task | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
| OAS-61-001 | DONE | Scaffold per-service OpenAPI 3.1 files with shared components, info blocks, and initial path stubs. |
|
||||
| OAS-61-002 | DONE (2025-11-18) | Composer (`compose.mjs`) emits `stella.yaml` with namespaced paths/components; CI job validates aggregate stays up to date. |
|
||||
| OAS-62-001 | DOING | Populate request/response examples for top 50 endpoints, including standard error envelope. |
|
||||
| OAS-62-002 | TODO | Add custom lint rules enforcing pagination, idempotency headers, naming conventions, and example coverage. |
|
||||
| OAS-63-001 | TODO | Implement compatibility diff tooling comparing previous release specs; classify breaking vs additive changes. |
|
||||
| OAS-63-002 | TODO | Add `/.well-known/openapi` discovery endpoint schema metadata (extensions, version info). |
|
||||
50
src/Attestor/StellaOps.Attestation.Tests/DsseHelperTests.cs
Normal file
50
src/Attestor/StellaOps.Attestation.Tests/DsseHelperTests.cs
Normal file
@@ -0,0 +1,50 @@
|
||||
using System;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading.Tasks;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Attestation;
|
||||
using StellaOps.Attestor.Envelope;
|
||||
using Xunit;
|
||||
|
||||
public class DsseHelperTests
|
||||
{
|
||||
private sealed class FakeSigner : IAuthoritySigner
|
||||
{
|
||||
public Task<string> GetKeyIdAsync(System.Threading.CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult("fake-key");
|
||||
|
||||
public Task<byte[]> SignAsync(ReadOnlyMemory<byte> paePayload, System.Threading.CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(Convert.FromHexString("deadbeef"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task WrapAsync_ProducesDsseEnvelope()
|
||||
{
|
||||
var stmt = new InTotoStatement(
|
||||
Type: "https://in-toto.io/Statement/v1",
|
||||
Subject: new[] { new Subject("demo", new System.Collections.Generic.Dictionary<string, string> { { "sha256", "abcd" } }) },
|
||||
PredicateType: "demo/predicate",
|
||||
Predicate: new { hello = "world" });
|
||||
|
||||
var envelope = await DsseHelper.WrapAsync(stmt, new FakeSigner());
|
||||
|
||||
envelope.PayloadType.Should().Be("https://in-toto.io/Statement/v1");
|
||||
var roundtrip = JsonSerializer.Deserialize<InTotoStatement>(envelope.Payload.Span);
|
||||
roundtrip!.PredicateType.Should().Be("demo/predicate");
|
||||
envelope.Signatures.Should().ContainSingle();
|
||||
envelope.Signatures[0].KeyId.Should().Be("fake-key");
|
||||
envelope.Signatures[0].Signature.Should().Be(Convert.ToBase64String(Convert.FromHexString("deadbeef")));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PreAuthenticationEncoding_FollowsDsseSpec()
|
||||
{
|
||||
var payloadType = "example/type";
|
||||
var payload = Encoding.UTF8.GetBytes("{}");
|
||||
|
||||
var pae = DsseHelper.PreAuthenticationEncoding(payloadType, payload);
|
||||
pae.Should().ContainSubsequence(Encoding.UTF8.GetBytes(payloadType));
|
||||
pae.Should().ContainSubsequence(payload);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,14 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../StellaOps.Attestation/StellaOps.Attestation.csproj" />
|
||||
<PackageReference Include="xunit" Version="2.5.3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.3" />
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
55
src/Attestor/StellaOps.Attestation/DsseHelper.cs
Normal file
55
src/Attestor/StellaOps.Attestation/DsseHelper.cs
Normal file
@@ -0,0 +1,55 @@
|
||||
using System;
|
||||
using System.Globalization;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Attestor.Envelope;
|
||||
|
||||
namespace StellaOps.Attestation;
|
||||
|
||||
public static class DsseHelper
|
||||
{
|
||||
public static byte[] PreAuthenticationEncoding(string payloadType, ReadOnlySpan<byte> payload)
|
||||
{
|
||||
static byte[] Cat(params byte[][] parts)
|
||||
{
|
||||
var len = 0;
|
||||
foreach (var part in parts)
|
||||
{
|
||||
len += part.Length;
|
||||
}
|
||||
|
||||
var buf = new byte[len];
|
||||
var offset = 0;
|
||||
foreach (var part in parts)
|
||||
{
|
||||
Buffer.BlockCopy(part, 0, buf, offset, part.Length);
|
||||
offset += part.Length;
|
||||
}
|
||||
return buf;
|
||||
}
|
||||
|
||||
var header = Encoding.UTF8.GetBytes("DSSEv1");
|
||||
var pt = Encoding.UTF8.GetBytes(payloadType);
|
||||
var lenPt = Encoding.UTF8.GetBytes(pt.Length.ToString(CultureInfo.InvariantCulture));
|
||||
var lenPayload = Encoding.UTF8.GetBytes(payload.Length.ToString(CultureInfo.InvariantCulture));
|
||||
var space = Encoding.UTF8.GetBytes(" ");
|
||||
|
||||
return Cat(header, space, lenPt, space, pt, space, lenPayload, space, payload.ToArray());
|
||||
}
|
||||
|
||||
public static async Task<DsseEnvelope> WrapAsync(InTotoStatement statement, IAuthoritySigner signer, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(statement);
|
||||
ArgumentNullException.ThrowIfNull(signer);
|
||||
|
||||
var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(statement, statement.GetType());
|
||||
var pae = PreAuthenticationEncoding(statement.Type ?? string.Empty, payloadBytes);
|
||||
var signatureBytes = await signer.SignAsync(pae, cancellationToken).ConfigureAwait(false);
|
||||
var keyId = await signer.GetKeyIdAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var dsseSignature = DsseSignature.FromBytes(signatureBytes, keyId);
|
||||
return new DsseEnvelope(statement.Type, payloadBytes, new[] { dsseSignature });
|
||||
}
|
||||
}
|
||||
11
src/Attestor/StellaOps.Attestation/IAuthoritySigner.cs
Normal file
11
src/Attestor/StellaOps.Attestation/IAuthoritySigner.cs
Normal file
@@ -0,0 +1,11 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Attestation;
|
||||
|
||||
public interface IAuthoritySigner
|
||||
{
|
||||
Task<string> GetKeyIdAsync(CancellationToken cancellationToken = default);
|
||||
|
||||
Task<byte[]> SignAsync(ReadOnlyMemory<byte> paePayload, CancellationToken cancellationToken = default);
|
||||
}
|
||||
14
src/Attestor/StellaOps.Attestation/Models.cs
Normal file
14
src/Attestor/StellaOps.Attestation/Models.cs
Normal file
@@ -0,0 +1,14 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestation;
|
||||
|
||||
public sealed record Subject(
|
||||
[property: JsonPropertyName("name")] string Name,
|
||||
[property: JsonPropertyName("digest")] IReadOnlyDictionary<string, string> Digest);
|
||||
|
||||
public sealed record InTotoStatement(
|
||||
[property: JsonPropertyName("_type")] string Type,
|
||||
[property: JsonPropertyName("subject")] IReadOnlyList<Subject> Subject,
|
||||
[property: JsonPropertyName("predicateType")] string PredicateType,
|
||||
[property: JsonPropertyName("predicate")] object Predicate);
|
||||
@@ -0,0 +1,12 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -11,12 +11,27 @@ namespace StellaOps.Concelier.Core.Linksets;
|
||||
/// </summary>
|
||||
public sealed partial class AdvisoryLinksetMapper : IAdvisoryLinksetMapper
|
||||
{
|
||||
private static readonly HashSet<string> AliasSchemesOfInterest = new(new[]
|
||||
{
|
||||
AliasSchemes.Cve,
|
||||
AliasSchemes.Ghsa,
|
||||
AliasSchemes.OsV
|
||||
}, StringComparer.OrdinalIgnoreCase);
|
||||
private static readonly HashSet<string> AliasSchemesOfInterest = new(new[]
|
||||
{
|
||||
AliasSchemes.Cve,
|
||||
AliasSchemes.Ghsa,
|
||||
AliasSchemes.OsV,
|
||||
AliasSchemes.Rhsa,
|
||||
AliasSchemes.Usn,
|
||||
AliasSchemes.Dsa,
|
||||
AliasSchemes.SuseSu,
|
||||
AliasSchemes.Msrc,
|
||||
AliasSchemes.CiscoSa,
|
||||
AliasSchemes.OracleCpu,
|
||||
AliasSchemes.Vmsa,
|
||||
AliasSchemes.Apsb,
|
||||
AliasSchemes.Apa,
|
||||
AliasSchemes.AppleHt,
|
||||
AliasSchemes.Icsa,
|
||||
AliasSchemes.Jvndb,
|
||||
AliasSchemes.Jvn,
|
||||
AliasSchemes.Bdu
|
||||
}, StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
public RawLinkset Map(AdvisoryRawDocument document)
|
||||
{
|
||||
|
||||
@@ -3,6 +3,7 @@ using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Normalization.SemVer;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Linksets;
|
||||
|
||||
@@ -41,13 +42,14 @@ internal static class AdvisoryLinksetNormalization
|
||||
{
|
||||
var normalizedPurls = NormalizePurls(purlValues);
|
||||
var versions = ExtractVersions(normalizedPurls);
|
||||
var ranges = BuildVersionRanges(normalizedPurls);
|
||||
|
||||
if (normalizedPurls.Count == 0 && versions.Count == 0)
|
||||
if (normalizedPurls.Count == 0 && versions.Count == 0 && ranges.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return new AdvisoryLinksetNormalized(normalizedPurls, versions, null, null);
|
||||
return new AdvisoryLinksetNormalized(normalizedPurls, versions, ranges, null);
|
||||
}
|
||||
|
||||
private static List<string> NormalizePurls(IEnumerable<string> purls)
|
||||
@@ -89,6 +91,55 @@ internal static class AdvisoryLinksetNormalization
|
||||
return versions.ToList();
|
||||
}
|
||||
|
||||
private static List<Dictionary<string, object?>> BuildVersionRanges(IReadOnlyCollection<string> purls)
|
||||
{
|
||||
var ranges = new List<Dictionary<string, object?>>();
|
||||
|
||||
foreach (var purl in purls)
|
||||
{
|
||||
var atIndex = purl.LastIndexOf('@');
|
||||
if (atIndex < 0 || atIndex >= purl.Length - 1)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var versionSegment = purl[(atIndex + 1)..];
|
||||
if (string.IsNullOrWhiteSpace(versionSegment))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!LooksLikeRange(versionSegment))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var rules = SemVerRangeRuleBuilder.BuildNormalizedRules(versionSegment, provenanceNote: $"purl:{purl}");
|
||||
foreach (var rule in rules)
|
||||
{
|
||||
ranges.Add(new Dictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
{ "scheme", rule.Scheme },
|
||||
{ "type", rule.Type },
|
||||
{ "min", rule.Min },
|
||||
{ "minInclusive", rule.MinInclusive },
|
||||
{ "max", rule.Max },
|
||||
{ "maxInclusive", rule.MaxInclusive },
|
||||
{ "value", rule.Value },
|
||||
{ "notes", rule.Notes }
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return ranges;
|
||||
}
|
||||
|
||||
private static bool LooksLikeRange(string value)
|
||||
{
|
||||
return value.IndexOfAny(new[] { '^', '~', '*', ' ', ',', '|', '>' , '<' }) >= 0 ||
|
||||
value.Contains("||", StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
private static double? CoerceConfidence(double? confidence)
|
||||
{
|
||||
if (!confidence.HasValue)
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
#nullable enable
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Policy.AuthSignals;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Linksets;
|
||||
|
||||
/// <summary>
|
||||
/// Maps advisory linksets into the shared Policy/Auth/Signals contract so policy enrichment tasks can start.
|
||||
/// This is a minimal, fact-only projection (no weighting or merge logic).
|
||||
/// </summary>
|
||||
public static class PolicyAuthSignalFactory
|
||||
{
|
||||
public static PolicyAuthSignal ToPolicyAuthSignal(AdvisoryLinkset linkset)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(linkset);
|
||||
|
||||
var firstPurl = linkset.Normalized?.Purls?.FirstOrDefault();
|
||||
|
||||
var evidence = new List<EvidenceRef>
|
||||
{
|
||||
new()
|
||||
{
|
||||
Kind = "linkset",
|
||||
Uri = $"cas://linksets/{linkset.AdvisoryId}",
|
||||
Digest = "sha256:pending" // real digest filled when CAS manifests are available
|
||||
}
|
||||
};
|
||||
|
||||
return new PolicyAuthSignal
|
||||
{
|
||||
Id = linkset.AdvisoryId,
|
||||
Tenant = linkset.TenantId,
|
||||
Subject = firstPurl ?? $"advisory:{linkset.Source}:{linkset.AdvisoryId}",
|
||||
SignalType = "reachability",
|
||||
Source = linkset.Source,
|
||||
Confidence = linkset.Confidence,
|
||||
Evidence = evidence,
|
||||
Created = linkset.CreatedAt.UtcDateTime
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ using System.Text;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Models.Observations;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Observations;
|
||||
|
||||
@@ -251,10 +252,10 @@ public sealed class AdvisoryObservationQueryService : IAdvisoryObservationQueryS
|
||||
relationshipSet.Add(relationship);
|
||||
}
|
||||
|
||||
var (_, rawConfidence, rawConflicts) = AdvisoryLinksetNormalization.FromRawLinksetWithConfidence(observation.RawLinkset);
|
||||
confidence = Math.Min(confidence, rawConfidence ?? 1.0);
|
||||
var linksetProjection = AdvisoryLinksetNormalization.FromRawLinksetWithConfidence(observation.RawLinkset);
|
||||
confidence = Math.Min(confidence, linksetProjection.confidence ?? 1.0);
|
||||
|
||||
foreach (var conflict in rawConflicts)
|
||||
foreach (var conflict in linksetProjection.conflicts)
|
||||
{
|
||||
var key = $"{conflict.Field}|{conflict.Reason}|{string.Join('|', conflict.Values ?? Array.Empty<string>())}";
|
||||
if (conflictSet.Add(key))
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
#nullable enable
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Policy.AuthSignals;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Policy;
|
||||
|
||||
/// <summary>
|
||||
/// Temporary bridge to consume the shared Policy/Auth/Signals contract package so downstream POLICY tasks can start.
|
||||
/// </summary>
|
||||
public static class AuthSignalsPackage
|
||||
{
|
||||
public static PolicyAuthSignal CreateSample() => new()
|
||||
{
|
||||
Id = "sample",
|
||||
Tenant = "urn:tenant:sample",
|
||||
Subject = "purl:pkg:maven/org.example/app@1.0.0",
|
||||
SignalType = "reachability",
|
||||
Source = "concelier",
|
||||
Evidence = new List<EvidenceRef>
|
||||
{
|
||||
new()
|
||||
{
|
||||
Kind = "linkset",
|
||||
Uri = "cas://linksets/sample",
|
||||
Digest = "sha256:stub"
|
||||
}
|
||||
},
|
||||
Created = DateTime.UtcNow
|
||||
};
|
||||
}
|
||||
@@ -13,6 +13,7 @@
|
||||
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
|
||||
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" />
|
||||
<PackageReference Include="Cronos" Version="0.10.0" />
|
||||
<PackageReference Include="StellaOps.Policy.AuthSignals" Version="0.1.0-alpha" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
|
||||
|
||||
@@ -108,6 +108,12 @@ public static class ServiceCollectionExtensions
|
||||
return database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
|
||||
});
|
||||
|
||||
services.AddSingleton<IMongoCollection<AdvisoryLinksetDocument>>(static sp =>
|
||||
{
|
||||
var database = sp.GetRequiredService<IMongoDatabase>();
|
||||
return database.GetCollection<AdvisoryLinksetDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
|
||||
});
|
||||
|
||||
services.AddHostedService<RawDocumentRetentionService>();
|
||||
|
||||
services.AddSingleton<MongoMigrationRunner>();
|
||||
|
||||
@@ -79,11 +79,11 @@ public sealed class AdvisoryLinksetMapperTests
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Map_DeduplicatesValuesButRetainsMultipleOrigins()
|
||||
{
|
||||
using var contentDoc = JsonDocument.Parse(
|
||||
"""
|
||||
{
|
||||
public void Map_DeduplicatesValuesButRetainsMultipleOrigins()
|
||||
{
|
||||
using var contentDoc = JsonDocument.Parse(
|
||||
"""
|
||||
{
|
||||
"aliases": ["CVE-2025-0002", "CVE-2025-0002"],
|
||||
"packages": [
|
||||
{ "coordinates": "pkg:npm/package-b@2.0.0" },
|
||||
@@ -119,7 +119,36 @@ public sealed class AdvisoryLinksetMapperTests
|
||||
|
||||
Assert.Contains("/content/raw/aliases/0", result.ReconciledFrom);
|
||||
Assert.Contains("/content/raw/aliases/1", result.ReconciledFrom);
|
||||
Assert.Contains("/content/raw/packages/0/coordinates", result.ReconciledFrom);
|
||||
Assert.Contains("/content/raw/packages/1/coordinates", result.ReconciledFrom);
|
||||
}
|
||||
}
|
||||
Assert.Contains("/content/raw/packages/0/coordinates", result.ReconciledFrom);
|
||||
Assert.Contains("/content/raw/packages/1/coordinates", result.ReconciledFrom);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Map_IncludesVendorAliasesNeededForPolicyEquivalence()
|
||||
{
|
||||
var document = new AdvisoryRawDocument(
|
||||
Tenant: "tenant-a",
|
||||
Source: new RawSourceMetadata("vendor", "connector", "1.0.0"),
|
||||
Upstream: new RawUpstreamMetadata(
|
||||
UpstreamId: "RHSA-2025:0010",
|
||||
DocumentVersion: "1",
|
||||
RetrievedAt: DateTimeOffset.UtcNow,
|
||||
ContentHash: "sha256:ghi",
|
||||
Signature: new RawSignatureMetadata(false),
|
||||
Provenance: ImmutableDictionary<string, string>.Empty),
|
||||
Content: new RawContent(
|
||||
Format: "rhsa",
|
||||
SpecVersion: "1.0",
|
||||
Raw: JsonDocument.Parse("""{"advisory_id":"RHSA-2025:0010"}""").RootElement.Clone()),
|
||||
Identifiers: new RawIdentifiers(
|
||||
Aliases: ImmutableArray.Create("RHSA-2025:0010"),
|
||||
PrimaryId: "RHSA-2025:0010"),
|
||||
Linkset: new RawLinkset());
|
||||
|
||||
var mapper = new AdvisoryLinksetMapper();
|
||||
|
||||
var result = mapper.Map(document);
|
||||
|
||||
Assert.Contains("rhsa-2025:0010", result.Aliases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Tests.Linksets;
|
||||
|
||||
public sealed class AdvisoryLinksetNormalizationTests
|
||||
{
|
||||
[Fact]
|
||||
public void FromRawLinksetWithConfidence_EmitsSemVerRangesForCaretVersions()
|
||||
{
|
||||
var linkset = new RawLinkset
|
||||
{
|
||||
PackageUrls = ImmutableArray.Create("pkg:npm/example@^1.2.3")
|
||||
};
|
||||
|
||||
var (normalized, _, _) = AdvisoryLinksetNormalization.FromRawLinksetWithConfidence(linkset);
|
||||
|
||||
normalized.Should().NotBeNull();
|
||||
normalized!.Ranges.Should().NotBeNull();
|
||||
normalized.Ranges!.Should().ContainSingle();
|
||||
|
||||
var range = normalized.Ranges![0];
|
||||
range["scheme"].Should().Be("semver");
|
||||
range["type"].Should().Be("range");
|
||||
range["min"].Should().Be("1.2.3");
|
||||
range["minInclusive"].Should().Be(true);
|
||||
range["max"].Should().Be("2.0.0");
|
||||
range["maxInclusive"].Should().Be(false);
|
||||
range["notes"].Should().Be("purl:pkg:npm/example@^1.2.3");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Tests.Linksets;
|
||||
|
||||
public class PolicyAuthSignalFactoryTests
|
||||
{
|
||||
[Fact]
|
||||
public void ToPolicyAuthSignal_maps_basic_fields()
|
||||
{
|
||||
var linkset = new AdvisoryLinkset(
|
||||
TenantId: "urn:tenant:demo",
|
||||
Source: "ghsa",
|
||||
AdvisoryId: "GHSA-1234",
|
||||
ObservationIds: ImmutableArray.Create("obs-1"),
|
||||
Normalized: new AdvisoryLinksetNormalized(
|
||||
Purls: new[] { "purl:pkg:maven/org.example/app@1.2.3" },
|
||||
Versions: Array.Empty<string>(),
|
||||
Ranges: null,
|
||||
Severities: null),
|
||||
Provenance: null,
|
||||
Confidence: 0.9,
|
||||
Conflicts: null,
|
||||
CreatedAt: DateTimeOffset.UtcNow,
|
||||
BuiltByJobId: null);
|
||||
|
||||
var signal = PolicyAuthSignalFactory.ToPolicyAuthSignal(linkset);
|
||||
|
||||
signal.Id.Should().Be("GHSA-1234");
|
||||
signal.Tenant.Should().Be("urn:tenant:demo");
|
||||
signal.Subject.Should().Be("purl:pkg:maven/org.example/app@1.2.3");
|
||||
signal.Source.Should().Be("ghsa");
|
||||
signal.SignalType.Should().Be("reachability");
|
||||
signal.Evidence.Should().HaveCount(1);
|
||||
signal.Evidence[0].Uri.Should().Contain("GHSA-1234");
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ using System.Collections.Immutable;
|
||||
using System.Reflection;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Concelier.Core.Observations;
|
||||
using StellaOps.Concelier.Models.Observations;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
@@ -41,13 +42,16 @@ public sealed class AdvisoryObservationAggregationTests
|
||||
{
|
||||
var linkset = new RawLinkset
|
||||
{
|
||||
Notes = new Dictionary<string, string>
|
||||
Notes = ImmutableDictionary.CreateRange(new[]
|
||||
{
|
||||
{ "severity", "disagree" }
|
||||
}
|
||||
new KeyValuePair<string, string>("severity", "disagree")
|
||||
})
|
||||
};
|
||||
|
||||
var (normalized, confidence, conflicts) = AdvisoryLinksetNormalization.FromRawLinksetWithConfidence(linkset);
|
||||
var result = AdvisoryLinksetNormalization.FromRawLinksetWithConfidence(linkset);
|
||||
var normalized = result.normalized;
|
||||
var confidence = result.confidence;
|
||||
var conflicts = result.conflicts;
|
||||
|
||||
Assert.Equal(0.5, confidence);
|
||||
Assert.Single(conflicts);
|
||||
|
||||
@@ -10,6 +10,7 @@ using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Aoc;
|
||||
using StellaOps.Concelier.Core.Aoc;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Concelier.Core.Observations;
|
||||
using StellaOps.Concelier.Core.Raw;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Ingestion.Telemetry;
|
||||
@@ -194,37 +195,10 @@ public sealed class AdvisoryRawServiceTests
|
||||
|
||||
private sealed class StubObservationFactory : IAdvisoryObservationFactory
|
||||
{
|
||||
public Models.Observations.AdvisoryObservation Create(Models.Advisory advisory, string tenant, string source, RawModels.AdvisoryRawDocument raw, string advisoryKey, string observationId, DateTimeOffset createdAt)
|
||||
{
|
||||
var upstream = new Models.Observations.AdvisoryObservationUpstream(
|
||||
upstreamId: raw.Upstream.UpstreamId,
|
||||
documentVersion: raw.Upstream.DocumentVersion,
|
||||
fetchedAt: raw.Upstream.RetrievedAt ?? createdAt,
|
||||
receivedAt: createdAt,
|
||||
contentHash: raw.Upstream.ContentHash,
|
||||
signature: new Models.Observations.AdvisoryObservationSignature(raw.Upstream.Signature.Present, raw.Upstream.Signature.Format, raw.Upstream.Signature.KeyId, raw.Upstream.Signature.Signature),
|
||||
metadata: raw.Upstream.Provenance);
|
||||
private readonly AdvisoryObservationFactory _inner = new();
|
||||
|
||||
var content = new Models.Observations.AdvisoryObservationContent(raw.Content.Format, raw.Content.SpecVersion, JsonDocument.Parse(raw.Content.Raw.GetRawText()).RootElement);
|
||||
|
||||
var linkset = new Models.Observations.AdvisoryObservationLinkset(
|
||||
raw.Linkset.Aliases,
|
||||
raw.Linkset.PackageUrls,
|
||||
raw.Linkset.Cpes,
|
||||
ImmutableArray<Models.Observations.AdvisoryObservationReference>.Empty);
|
||||
|
||||
var rawLinkset = raw.Linkset;
|
||||
|
||||
return new Models.Observations.AdvisoryObservation(
|
||||
observationId,
|
||||
tenant,
|
||||
new Models.Observations.AdvisoryObservationSource(source, "stream", "api"),
|
||||
upstream,
|
||||
content,
|
||||
linkset,
|
||||
rawLinkset,
|
||||
createdAt);
|
||||
}
|
||||
public Models.Observations.AdvisoryObservation Create(RawModels.AdvisoryRawDocument rawDocument, DateTimeOffset? observedAt = null)
|
||||
=> _inner.Create(rawDocument, observedAt);
|
||||
}
|
||||
|
||||
private static AdvisoryRawDocument CreateDocument()
|
||||
|
||||
@@ -10,5 +10,6 @@
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Ingestion.Telemetry/StellaOps.Ingestion.Telemetry.csproj" />
|
||||
<ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" />
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" PrivateAssets="All" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -0,0 +1,132 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.Text;
|
||||
using System.Text.Json.Nodes;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Models.Observations;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Concelier.WebService.Services;
|
||||
using StellaOps.Cryptography;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.WebService.Tests;
|
||||
|
||||
public class AdvisoryChunkBuilderTests
|
||||
{
|
||||
private readonly ICryptoHash _hash = CryptoHashFactory.CreateDefault();
|
||||
|
||||
[Fact]
|
||||
public void Build_UsesJsonPointerFromMaskForObservationPath()
|
||||
{
|
||||
var recordedAt = DateTimeOffset.Parse("2025-11-18T00:00:00Z", CultureInfo.InvariantCulture);
|
||||
var observationId = "obs-1";
|
||||
var observation = BuildObservation(observationId);
|
||||
var advisory = BuildAdvisory(recordedAt, observationId, new[] { "/references/0" });
|
||||
|
||||
var options = new AdvisoryChunkBuildOptions(
|
||||
advisory.AdvisoryKey,
|
||||
fingerprint: "fp",
|
||||
chunkLimit: 5,
|
||||
observationLimit: 5,
|
||||
sectionFilter: ImmutableHashSet.Create("workaround"),
|
||||
formatFilter: ImmutableHashSet<string>.Empty,
|
||||
minimumLength: 1);
|
||||
|
||||
var builder = new AdvisoryChunkBuilder(_hash);
|
||||
var result = builder.Build(options, advisory, new[] { observation });
|
||||
|
||||
var entry = Assert.Single(result.Response.Entries);
|
||||
Assert.Equal("/references/0", entry.Provenance.ObservationPath);
|
||||
Assert.Contains("/references/0", entry.Provenance.FieldMask);
|
||||
|
||||
var expectedChunkId = ComputeChunkId(observationId, "/references/0");
|
||||
Assert.Equal(expectedChunkId, entry.ChunkId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_FallsBackToFieldPathWhenMaskMissing()
|
||||
{
|
||||
var recordedAt = DateTimeOffset.Parse("2025-11-18T00:00:00Z", CultureInfo.InvariantCulture);
|
||||
var observationId = "obs-2";
|
||||
var observation = BuildObservation(observationId);
|
||||
var advisory = BuildAdvisory(recordedAt, observationId, fieldMask: null);
|
||||
|
||||
var options = new AdvisoryChunkBuildOptions(
|
||||
advisory.AdvisoryKey,
|
||||
fingerprint: "fp",
|
||||
chunkLimit: 5,
|
||||
observationLimit: 5,
|
||||
sectionFilter: ImmutableHashSet.Create("workaround"),
|
||||
formatFilter: ImmutableHashSet<string>.Empty,
|
||||
minimumLength: 1);
|
||||
|
||||
var builder = new AdvisoryChunkBuilder(_hash);
|
||||
var result = builder.Build(options, advisory, new[] { observation });
|
||||
|
||||
var entry = Assert.Single(result.Response.Entries);
|
||||
Assert.Equal("/references/0", entry.Provenance.ObservationPath);
|
||||
Assert.Contains("/references/0", entry.Provenance.FieldMask);
|
||||
|
||||
var expectedChunkId = ComputeChunkId(observationId, "/references/0");
|
||||
Assert.Equal(expectedChunkId, entry.ChunkId);
|
||||
}
|
||||
|
||||
/// <summary>
/// Builds a minimal advisory fixture with a single "workaround" reference.
/// When <paramref name="fieldMask"/> is null the reference provenance is
/// created without a mask, exercising the builder's fallback path.
/// </summary>
private Advisory BuildAdvisory(DateTimeOffset recordedAt, string observationId, IEnumerable<string>? fieldMask)
{
    var provenance = fieldMask is null
        ? new AdvisoryProvenance("nvd", "workaround", observationId, recordedAt)
        : new AdvisoryProvenance("nvd", "workaround", observationId, recordedAt, fieldMask);

    var reference = new AdvisoryReference(
        url: "https://example.test/workaround",
        kind: "workaround",
        sourceTag: "Vendor guidance",
        summary: "Apply the workaround.",
        provenance);

    return new Advisory(
        advisoryKey: "CVE-2025-0001",
        title: "Test advisory",
        summary: "",
        language: "en",
        published: recordedAt,
        modified: recordedAt,
        severity: "high",
        exploitKnown: false,
        aliases: new[] { "CVE-2025-0001" },
        references: new[] { reference },
        affectedPackages: Array.Empty<AffectedPackage>(),
        cvssMetrics: Array.Empty<CvssMetric>(),
        provenance: new[] { new AdvisoryProvenance("nvd", "advisory", observationId, recordedAt) });
}
|
||||
|
||||
/// <summary>
/// Builds a minimal advisory observation fixture for the given id with a
/// fixed timestamp, an unsigned upstream document, and empty linksets.
/// </summary>
private static AdvisoryObservation BuildObservation(string observationId)
{
    var timestamp = DateTimeOffset.Parse("2025-11-18T00:00:00Z", CultureInfo.InvariantCulture);

    return new AdvisoryObservation(
        observationId,
        tenant: "tenant-a",
        source: new AdvisoryObservationSource("nvd", "stream", "api"),
        upstream: new AdvisoryObservationUpstream(
            upstreamId: observationId,
            documentVersion: "1",
            fetchedAt: timestamp,
            receivedAt: timestamp,
            contentHash: "sha256:deadbeef",
            signature: new AdvisoryObservationSignature(present: false)),
        content: new AdvisoryObservationContent("csaf", "2.0", JsonNode.Parse("{}")!),
        linkset: new AdvisoryObservationLinkset(Array.Empty<string>(), Array.Empty<string>(), Array.Empty<AdvisoryObservationReference>()),
        rawLinkset: new RawLinkset(),
        createdAt: timestamp);
}
|
||||
|
||||
/// <summary>
/// Computes the expected chunk id for assertions: SHA-256 of
/// "documentId|observationPath", truncated to the first 8 bytes, hex-encoded.
/// Mirrors the derivation performed by the chunk builder under test.
/// </summary>
private string ComputeChunkId(string documentId, string observationPath)
{
    var input = string.Concat(documentId, '|', observationPath);
    var bytes = Encoding.UTF8.GetBytes(input);
    var digest = _hash.ComputeHash(bytes, HashAlgorithms.Sha256);
    return Convert.ToHexString(digest.AsSpan(0, 8));
}
|
||||
}
|
||||
@@ -0,0 +1,107 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.Text.Json.Nodes;
|
||||
using StellaOps.Concelier.Models.Observations;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Concelier.WebService.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.WebService.Tests;
|
||||
|
||||
/// <summary>
/// Verifies that <c>AdvisoryChunkCacheKey.Create</c> is deterministic:
/// insensitive to observation ordering and filter casing, but sensitive to
/// observation content-hash changes.
/// </summary>
public class AdvisoryChunkCacheKeyTests
{
    [Fact]
    public void Create_NormalizesObservationOrdering()
    {
        var options = new AdvisoryChunkBuildOptions(
            AdvisoryKey: "CVE-2025-0001",
            Fingerprint: "fp",
            ChunkLimit: 10,
            ObservationLimit: 10,
            SectionFilter: ImmutableHashSet.Create("workaround"),
            FormatFilter: ImmutableHashSet<string>.Empty,
            MinimumLength: 8);

        var first = BuildObservation("obs-1", "sha256:one", "2025-11-18T00:00:00Z");
        var second = BuildObservation("obs-2", "sha256:two", "2025-11-18T00:05:00Z");

        // Supplying the same observations in different order must yield the same key.
        var ordered = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0001", options, new[] { first, second }, "fp");
        var reversed = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0001", options, new[] { second, first }, "fp");

        Assert.Equal(ordered.Value, reversed.Value);
        Assert.Equal(ordered.ComputeHash(), reversed.ComputeHash());
    }

    [Fact]
    public void Create_NormalizesFilterCasing()
    {
        var optionsLower = new AdvisoryChunkBuildOptions(
            "CVE-2025-0002",
            Fingerprint: "fp",
            ChunkLimit: 5,
            ObservationLimit: 5,
            SectionFilter: ImmutableHashSet.Create("workaround", "fix"),
            FormatFilter: ImmutableHashSet.Create("ndjson"),
            MinimumLength: 1);

        var optionsUpper = new AdvisoryChunkBuildOptions(
            "CVE-2025-0002",
            Fingerprint: "fp",
            ChunkLimit: 5,
            ObservationLimit: 5,
            SectionFilter: ImmutableHashSet.Create("WorkAround", "FIX"),
            FormatFilter: ImmutableHashSet.Create("NDJSON"),
            MinimumLength: 1);

        var observation = BuildObservation("obs-3", "sha256:three", "2025-11-18T00:10:00Z");

        // Filters differing only in casing must produce identical cache keys.
        var lower = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0002", optionsLower, new[] { observation }, "fp");
        var upper = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0002", optionsUpper, new[] { observation }, "fp");

        Assert.Equal(lower.Value, upper.Value);
        Assert.Equal(lower.ComputeHash(), upper.ComputeHash());
    }

    [Fact]
    public void Create_ChangesWhenContentHashDiffers()
    {
        var options = new AdvisoryChunkBuildOptions(
            "CVE-2025-0003",
            Fingerprint: "fp",
            ChunkLimit: 5,
            ObservationLimit: 5,
            SectionFilter: ImmutableHashSet<string>.Empty,
            FormatFilter: ImmutableHashSet<string>.Empty,
            MinimumLength: 1);

        // Same observation id but different content hash => different cache key.
        var original = BuildObservation("obs-4", "sha256:orig", "2025-11-18T00:15:00Z");
        var mutated = BuildObservation("obs-4", "sha256:mut", "2025-11-18T00:15:00Z");

        var originalKey = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0003", options, new[] { original }, "fp");
        var mutatedKey = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0003", options, new[] { mutated }, "fp");

        Assert.NotEqual(originalKey.Value, mutatedKey.Value);
        Assert.NotEqual(originalKey.ComputeHash(), mutatedKey.ComputeHash());
    }

    /// <summary>
    /// Builds a minimal observation fixture with the given id, content hash,
    /// and timestamp (ISO-8601, parsed with the invariant culture).
    /// </summary>
    private static AdvisoryObservation BuildObservation(string id, string contentHash, string timestamp)
    {
        var createdAt = DateTimeOffset.Parse(timestamp, CultureInfo.InvariantCulture);

        return new AdvisoryObservation(
            id,
            tenant: "tenant-a",
            source: new AdvisoryObservationSource("nvd", "stream", "api"),
            upstream: new AdvisoryObservationUpstream(
                upstreamId: id,
                documentVersion: "1",
                fetchedAt: createdAt,
                receivedAt: createdAt,
                contentHash: contentHash,
                signature: new AdvisoryObservationSignature(false)),
            content: new AdvisoryObservationContent("csaf", "2.0", JsonNode.Parse("{}")!),
            linkset: new AdvisoryObservationLinkset(Array.Empty<string>(), Array.Empty<string>(), Array.Empty<AdvisoryObservationReference>()),
            rawLinkset: new RawLinkset(),
            createdAt: createdAt);
    }
}
|
||||
@@ -0,0 +1,177 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json.Nodes;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Models.Observations;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Concelier.WebService.Services;
|
||||
using StellaOps.Cryptography;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.WebService.Tests.Services;
|
||||
|
||||
/// <summary>
/// Verifies that <c>AdvisoryChunkBuilder</c> derives observation paths from
/// provenance field masks (trimming whitespace) and falls back to positional
/// JSON pointers when the mask is absent.
/// </summary>
public sealed class AdvisoryChunkBuilderTests
{
    private static readonly DateTimeOffset RecordedAt = DateTimeOffset.Parse("2025-11-18T00:00:00Z");

    [Fact]
    public void Build_UsesJsonPointerFromFieldMaskForObservationPath()
    {
        var observation = CreateObservation("tenant-a:obs-1", "sha256:abc123");
        // Mask value is deliberately padded with whitespace; the builder is
        // expected to trim it before use.
        var provenance = new AdvisoryProvenance(
            "nvd",
            "workaround",
            observation.ObservationId,
            RecordedAt,
            new[] { " /references/0/title " });

        var advisory = CreateAdvisory("CVE-2025-0001", provenance);
        var builder = new AdvisoryChunkBuilder(new TestCryptoHash());

        // NOTE(review): these calls previously mixed camelCase (chunkLimit:)
        // and PascalCase (SectionFilter:) named arguments for the same
        // constructor, which cannot compile; normalized to the PascalCase
        // record-parameter names used by AdvisoryChunkCacheKeyTests.
        var options = new AdvisoryChunkBuildOptions(
            advisory.AdvisoryKey,
            "fingerprint-1",
            ChunkLimit: 5,
            ObservationLimit: 5,
            SectionFilter: ImmutableHashSet<string>.Empty,
            FormatFilter: ImmutableHashSet<string>.Empty,
            MinimumLength: 0);

        var result = builder.Build(options, advisory, new[] { observation });

        var entry = Assert.Single(result.Response.Entries);
        Assert.Equal("/references/0/title", entry.Provenance.ObservationPath);
        Assert.Equal(observation.ObservationId, entry.Provenance.DocumentId);
        Assert.Equal(observation.Upstream.ContentHash, entry.Provenance.ContentHash);
        Assert.Equal(new[] { "/references/0/title" }, entry.Provenance.FieldMask);
        Assert.Equal(ComputeChunkId(observation.ObservationId, "/references/0/title"), entry.ChunkId);
    }

    [Fact]
    public void Build_FallsBackToFieldPathWhenMaskIsEmpty()
    {
        var observation = CreateObservation("tenant-b:obs-2", "sha256:def456");
        // No field mask: the builder must fall back to the positional pointer.
        var provenance = new AdvisoryProvenance(
            "nvd",
            "workaround",
            observation.ObservationId,
            RecordedAt);

        var advisory = CreateAdvisory("CVE-2025-0002", provenance);
        var builder = new AdvisoryChunkBuilder(new TestCryptoHash());

        var options = new AdvisoryChunkBuildOptions(
            advisory.AdvisoryKey,
            "fingerprint-2",
            ChunkLimit: 5,
            ObservationLimit: 5,
            SectionFilter: ImmutableHashSet<string>.Empty,
            FormatFilter: ImmutableHashSet<string>.Empty,
            MinimumLength: 0);

        var result = builder.Build(options, advisory, new[] { observation });

        var entry = Assert.Single(result.Response.Entries);
        Assert.Equal("/references/0", entry.Provenance.ObservationPath);
        Assert.Equal(observation.ObservationId, entry.Provenance.DocumentId);
        Assert.Equal(observation.Upstream.ContentHash, entry.Provenance.ContentHash);
        Assert.Equal(new[] { "/references/0" }, entry.Provenance.FieldMask);
        Assert.Equal(ComputeChunkId(observation.ObservationId, "/references/0"), entry.ChunkId);
    }

    /// <summary>Builds a one-reference advisory fixture for the given key/provenance.</summary>
    private static Advisory CreateAdvisory(string advisoryKey, AdvisoryProvenance provenance)
    {
        var reference = new AdvisoryReference(
            "https://vendor.example/workaround",
            kind: "workaround",
            sourceTag: "Vendor guidance",
            summary: "Apply configuration change",
            provenance: provenance);

        return new Advisory(
            advisoryKey,
            title: "Fixture advisory",
            summary: "Structured payload",
            language: "en",
            published: RecordedAt,
            modified: RecordedAt,
            severity: "critical",
            exploitKnown: false,
            aliases: new[] { advisoryKey },
            references: new[] { reference },
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });
    }

    /// <summary>Builds an unsigned observation fixture with the given id/content hash.</summary>
    private static AdvisoryObservation CreateObservation(string observationId, string contentHash)
    {
        var source = new AdvisoryObservationSource("nvd", "default", "v1");
        var upstream = new AdvisoryObservationUpstream(
            "upstream-id",
            documentVersion: "1",
            fetchedAt: DateTimeOffset.Parse("2025-11-17T00:00:00Z"),
            receivedAt: DateTimeOffset.Parse("2025-11-17T00:01:00Z"),
            contentHash: contentHash,
            signature: new AdvisoryObservationSignature(present: false, format: null, keyId: null, signature: null));

        var content = new AdvisoryObservationContent(
            format: "json",
            specVersion: "1.0",
            raw: JsonNode.Parse("{}")!);

        var linkset = new AdvisoryObservationLinkset(
            aliases: new[] { "CVE-2025-0001" },
            purls: Array.Empty<string>(),
            cpes: Array.Empty<string>(),
            references: Enumerable.Empty<AdvisoryObservationReference>());

        return new AdvisoryObservation(
            observationId,
            tenant: "tenant-a",
            source,
            upstream,
            content,
            linkset,
            rawLinkset: new RawLinkset(),
            createdAt: DateTimeOffset.Parse("2025-11-17T01:00:00Z"));
    }

    /// <summary>
    /// Expected chunk id: SHA-256 of "documentId|observationPath", first 8
    /// bytes hex-encoded. Mirrors the builder's derivation.
    /// </summary>
    private static string ComputeChunkId(string documentId, string observationPath)
    {
        var bytes = Encoding.UTF8.GetBytes(string.Concat(documentId, '|', observationPath));
        var digest = SHA256.HashData(bytes);
        return Convert.ToHexString(digest.AsSpan(0, 8));
    }

    /// <summary>
    /// Deterministic <see cref="ICryptoHash"/> test double that always uses
    /// SHA-256 regardless of the requested algorithm id.
    /// </summary>
    private sealed class TestCryptoHash : ICryptoHash
    {
        public byte[] ComputeHash(ReadOnlySpan<byte> data, string? algorithmId = null)
            => SHA256.HashData(data.ToArray());

        public string ComputeHashHex(ReadOnlySpan<byte> data, string? algorithmId = null)
            => Convert.ToHexString(ComputeHash(data, algorithmId)).ToLowerInvariant();

        public string ComputeHashBase64(ReadOnlySpan<byte> data, string? algorithmId = null)
            => Convert.ToBase64String(ComputeHash(data, algorithmId));

        public async ValueTask<byte[]> ComputeHashAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
        {
            using var memory = new MemoryStream();
            await stream.CopyToAsync(memory, cancellationToken).ConfigureAwait(false);
            return ComputeHash(memory.ToArray(), algorithmId);
        }

        public async ValueTask<string> ComputeHashHexAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
        {
            var bytes = await ComputeHashAsync(stream, algorithmId, cancellationToken).ConfigureAwait(false);
            return Convert.ToHexString(bytes).ToLowerInvariant();
        }
    }
}
|
||||
@@ -0,0 +1,140 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Net.Http.Json;
|
||||
using EphemeralMongo;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Excititor.Storage.Mongo;
|
||||
using StellaOps.Excititor.WebService.Contracts;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Excititor.WebService.Tests;
|
||||
|
||||
/// <summary>
/// Integration test for the /v1/vex/linksets endpoint: seeds two observations
/// for the same vulnerability/product pair into an ephemeral Mongo instance
/// and verifies they are grouped into a single linkset.
/// </summary>
public sealed class VexLinksetListEndpointTests : IDisposable
{
    private readonly IMongoRunner _runner;
    private readonly TestWebApplicationFactory _factory;

    public VexLinksetListEndpointTests()
    {
        _runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true });

        _factory = new TestWebApplicationFactory(
            configureConfiguration: configuration =>
            {
                configuration.AddInMemoryCollection(new Dictionary<string, string?>
                {
                    ["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString,
                    ["Excititor:Storage:Mongo:DatabaseName"] = "linksets_tests",
                    ["Excititor:Storage:Mongo:DefaultTenant"] = "tests",
                });
            },
            configureServices: services =>
            {
                TestServiceOverrides.Apply(services);
                services.AddTestAuthentication();
            });

        SeedObservations();
    }

    // FIX: was `async void`, which lets exceptions escape the xUnit runner and
    // makes failures non-deterministic; async tests must return Task.
    [Fact]
    public async Task LinksetsEndpoint_GroupsByVulnAndProduct()
    {
        using var client = _factory.CreateClient(new WebApplicationFactoryClientOptions { AllowAutoRedirect = false });
        client.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", "vex.read");
        client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tests");

        var response = await client.GetAsync("/v1/vex/linksets?vulnerabilityId=CVE-2025-0001&productKey=pkg:demo/app");
        response.EnsureSuccessStatusCode();

        var payload = await response.Content.ReadFromJsonAsync<VexLinksetListResponse>();
        Assert.NotNull(payload);
        Assert.Single(payload!.Items);

        var item = payload.Items.Single();
        Assert.Equal("CVE-2025-0001:pkg:demo/app", item.LinksetId);
        Assert.Equal("CVE-2025-0001", item.VulnerabilityId);
        Assert.Equal("pkg:demo/app", item.ProductKey);

        // NOTE(review): .Should() requires FluentAssertions, which is not among
        // this file's visible usings — presumably provided via global usings;
        // confirm, otherwise add `using FluentAssertions;`.
        item.Providers.Should().BeEquivalentTo(new[] { "provider-a", "provider-b" });
        item.Statuses.Should().BeEquivalentTo(new[] { "affected", "fixed" });
        item.Observations.Should().HaveCount(2);
        item.Observations.Should().Contain(o => o.ProviderId == "provider-a" && o.Status == "affected");
        item.Observations.Should().Contain(o => o.ProviderId == "provider-b" && o.Status == "fixed");
    }

    /// <summary>
    /// Seeds two raw observation documents (provider-a/affected and
    /// provider-b/fixed) directly into the observations collection.
    /// </summary>
    private void SeedObservations()
    {
        var client = new MongoClient(_runner.ConnectionString);
        var database = client.GetDatabase("linksets_tests");
        var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Observations);

        var observations = new List<BsonDocument>
        {
            new()
            {
                { "_id", "obs-1" },
                { "Tenant", "tests" },
                { "ObservationId", "obs-1" },
                { "VulnerabilityId", "cve-2025-0001" },
                { "ProductKey", "pkg:demo/app" },
                { "ProviderId", "provider-a" },
                { "Status", "affected" },
                { "StreamId", "stream" },
                { "CreatedAt", DateTime.UtcNow },
                { "Document", new BsonDocument { { "Digest", "digest-1" }, { "Format", "csaf" }, { "SourceUri", "https://example.test/a.json" } } },
                { "Statements", new BsonArray
                    {
                        new BsonDocument
                        {
                            { "VulnerabilityId", "cve-2025-0001" },
                            { "ProductKey", "pkg:demo/app" },
                            { "Status", "affected" },
                            { "LastObserved", DateTime.UtcNow },
                            { "Purl", "pkg:demo/app" }
                        }
                    }
                },
                { "Linkset", new BsonDocument { { "Purls", new BsonArray { "pkg:demo/app" } } } }
            },
            new()
            {
                { "_id", "obs-2" },
                { "Tenant", "tests" },
                { "ObservationId", "obs-2" },
                { "VulnerabilityId", "cve-2025-0001" },
                { "ProductKey", "pkg:demo/app" },
                { "ProviderId", "provider-b" },
                { "Status", "fixed" },
                { "StreamId", "stream" },
                { "CreatedAt", DateTime.UtcNow.AddMinutes(1) },
                { "Document", new BsonDocument { { "Digest", "digest-2" }, { "Format", "csaf" }, { "SourceUri", "https://example.test/b.json" } } },
                { "Statements", new BsonArray
                    {
                        new BsonDocument
                        {
                            { "VulnerabilityId", "cve-2025-0001" },
                            { "ProductKey", "pkg:demo/app" },
                            { "Status", "fixed" },
                            { "LastObserved", DateTime.UtcNow },
                            { "Purl", "pkg:demo/app" }
                        }
                    }
                },
                { "Linkset", new BsonDocument { { "Purls", new BsonArray { "pkg:demo/app" } } } }
            }
        };

        collection.InsertMany(observations);
    }

    public void Dispose()
    {
        _factory.Dispose();
        _runner.Dispose();
    }
}
|
||||
@@ -0,0 +1,46 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Graph.Indexer.Schema;
|
||||
|
||||
namespace StellaOps.Graph.Indexer.Tests;
|
||||
|
||||
/// <summary>
/// Verifies determinism and case-normalization rules of the graph identity
/// scheme: node ids are stable under tuple-key reordering and tenant/kind
/// casing, while fingerprint field values (e.g. digest) stay case-sensitive.
/// </summary>
public sealed class GraphIdentityTests
{
    [Fact]
    public void ComputeNodeId_IsDeterministic_WhenTupleOrderChanges()
    {
        var tupleA = ImmutableDictionary<string, string>.Empty
            .Add("purl", "pkg:npm/test@1.0.0")
            .Add("reason", "declared")
            .Add("scope", "runtime");

        // Same entries inserted in a different order.
        var tupleB = ImmutableDictionary<string, string>.Empty
            .Add("scope", "runtime")
            .Add("reason", "declared")
            .Add("purl", "pkg:npm/test@1.0.0");

        // Tenant and kind casing should also be normalized away.
        var idA = GraphIdentity.ComputeNodeId("Tenant-A", "Component", tupleA);
        var idB = GraphIdentity.ComputeNodeId("tenant-a", "component", tupleB);

        Assert.Equal(idA, idB);
        Assert.StartsWith("gn:tenant-a:component:", idA);
    }

    [Fact]
    public void ComputeEdgeId_IsCaseInsensitiveExceptFingerprintFields()
    {
        var tupleLower = ImmutableDictionary<string, string>.Empty
            .Add("digest", "sha256:ABC")
            .Add("source", "sbom");

        var tupleUpper = ImmutableDictionary<string, string>.Empty
            .Add("digest", "sha256:abc")
            .Add("source", "SBOM");

        var edgeA = GraphIdentity.ComputeEdgeId("TENANT", "depends_on", tupleLower);
        var edgeB = GraphIdentity.ComputeEdgeId("tenant", "DEPENDS_ON", tupleUpper);

        // digest key is case-sensitive by design; different casing produces different id when value changes.
        Assert.NotEqual(edgeA, edgeB);
        Assert.StartsWith("ge:tenant:DEPENDS_ON:", edgeA);
    }
}
|
||||
@@ -0,0 +1,131 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Nodes;
|
||||
using StellaOps.Graph.Indexer.Documents;
|
||||
using StellaOps.Graph.Indexer.Ingestion.Sbom;
|
||||
|
||||
namespace StellaOps.Graph.Indexer.Tests;
|
||||
|
||||
/// <summary>
/// Verifies that <c>GraphSnapshotBuilder</c> produces deterministic output:
/// lexicographically ordered adjacency lists and a manifest hash that is
/// stable under shuffled node/edge input ordering.
/// </summary>
public sealed class GraphSnapshotBuilderTests
{
    [Fact]
    public void Build_ProducesDeterministicAdjacencyOrdering()
    {
        var snapshot = new SbomSnapshot
        {
            Tenant = "tenant-a",
            ArtifactDigest = "sha256:artifact",
            SbomDigest = "sha256:sbom",
            BaseArtifacts = Array.Empty<SbomBaseArtifact>()
        };

        // Nodes intentionally provided out of order to verify deterministic sorting.
        var nodes = new[]
        {
            CreateComponentNode("node-b", "pkg:type/b@2.0"),
            CreateArtifactNode("node-artifact", snapshot.ArtifactDigest, snapshot.SbomDigest),
            CreateComponentNode("node-a", "pkg:type/a@1.0")
        }.ToImmutableArray();

        // Edges also out of order; adjacency should normalize ordering.
        var edges = new[]
        {
            CreateEdge("edge-b", source: "node-artifact", target: "node-b"),
            CreateEdge("edge-a", source: "node-artifact", target: "node-a")
        }.ToImmutableArray();

        var batch = new GraphBuildBatch(nodes, edges);
        var builder = new GraphSnapshotBuilder();

        var result = builder.Build(snapshot, batch, generatedAt: DateTimeOffset.Parse("2025-11-18T00:00:00Z"));

        // Node ordering is lexicographic by node id.
        var nodeIds = result.Adjacency.Nodes.Select(n => n.NodeId).ToArray();
        Assert.Equal(new[] { "node-a", "node-artifact", "node-b" }, nodeIds);

        // Outgoing edges are sorted per-node.
        var artifactNode = result.Adjacency.Nodes.Single(n => n.NodeId == "node-artifact");
        Assert.Equal(new[] { "edge-a", "edge-b" }, artifactNode.OutgoingEdges.ToArray());

        // Incoming edges preserved deterministically on targets.
        Assert.Equal(new[] { "edge-a" }, result.Adjacency.Nodes.Single(n => n.NodeId == "node-a").IncomingEdges.ToArray());
        Assert.Equal(new[] { "edge-b" }, result.Adjacency.Nodes.Single(n => n.NodeId == "node-b").IncomingEdges.ToArray());
    }

    [Fact]
    public void Build_ComputesStableManifestHash_ForShuffledInputs()
    {
        var snapshot = new SbomSnapshot
        {
            Tenant = "tenant-b",
            ArtifactDigest = "sha256:artifact-b",
            SbomDigest = "sha256:sbom-b",
            BaseArtifacts = Array.Empty<SbomBaseArtifact>()
        };

        var nodesA = new[]
        {
            CreateArtifactNode("art", snapshot.ArtifactDigest, snapshot.SbomDigest),
            CreateComponentNode("comp-1", "pkg:nuget/one@1.0.0"),
            CreateComponentNode("comp-2", "pkg:nuget/two@2.0.0")
        }.ToImmutableArray();

        var edgesA = new[]
        {
            CreateEdge("e2", source: "art", target: "comp-2"),
            CreateEdge("e1", source: "art", target: "comp-1")
        }.ToImmutableArray();

        var builder = new GraphSnapshotBuilder();
        var t = DateTimeOffset.Parse("2025-11-18T01:23:45Z");
        var baseline = builder.Build(snapshot, new GraphBuildBatch(nodesA, edgesA), t);

        // Shuffle nodes/edges and ensure hash remains identical.
        var nodesB = nodesA.Reverse().ToImmutableArray();
        var edgesB = edgesA.Reverse().ToImmutableArray();
        var shuffled = builder.Build(snapshot, new GraphBuildBatch(nodesB, edgesB), t);

        Assert.Equal(baseline.Manifest.Hash, shuffled.Manifest.Hash);
        Assert.Equal(baseline.Adjacency.Nodes.Select(n => n.NodeId), shuffled.Adjacency.Nodes.Select(n => n.NodeId));
    }

    /// <summary>Builds a JSON node document of kind "artifact" with digest attributes.</summary>
    private static JsonObject CreateArtifactNode(string id, string artifactDigest, string sbomDigest)
    {
        var attributes = new JsonObject
        {
            ["artifact_digest"] = artifactDigest,
            ["sbom_digest"] = sbomDigest
        };

        return new JsonObject
        {
            ["id"] = id,
            ["kind"] = "artifact",
            ["attributes"] = attributes
        };
    }

    /// <summary>Builds a JSON node document of kind "component" keyed by purl.</summary>
    private static JsonObject CreateComponentNode(string id, string purl)
    {
        var attributes = new JsonObject
        {
            ["purl"] = purl
        };

        return new JsonObject
        {
            ["id"] = id,
            ["kind"] = "component",
            ["attributes"] = attributes
        };
    }

    /// <summary>Builds a JSON edge document connecting source to target.</summary>
    private static JsonObject CreateEdge(string id, string source, string target)
    {
        return new JsonObject
        {
            ["id"] = id,
            ["source"] = source,
            ["target"] = target
        };
    }
}
|
||||
@@ -0,0 +1,109 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Nodes;
|
||||
using StellaOps.Graph.Indexer.Documents;
|
||||
using StellaOps.Graph.Indexer.Ingestion.Sbom;
|
||||
using StellaOps.Graph.Indexer.Schema;
|
||||
|
||||
namespace StellaOps.Graph.Indexer.Tests;
|
||||
|
||||
/// <summary>
/// Verifies that <c>SbomSnapshotExporter</c> writes canonical snapshot files
/// (manifest, adjacency, node/edge JSONL) with a recomputable manifest hash.
/// </summary>
// FIX: the class declared a Dispose() method but did not implement
// IDisposable, so xUnit never invoked it and temp directories leaked.
public sealed class SbomSnapshotExporterTests : IDisposable
{
    private readonly string _tempRoot;

    public SbomSnapshotExporterTests()
    {
        // Unique per-test-class temp root so parallel runs cannot collide.
        _tempRoot = Path.Combine(Path.GetTempPath(), "graph-snapshot-tests", Guid.NewGuid().ToString("n"));
        Directory.CreateDirectory(_tempRoot);
    }

    [Fact]
    public async Task ExportAsync_WritesCanonicalFilesWithStableHash()
    {
        var snapshot = new SbomSnapshot
        {
            Tenant = "tenant-c",
            ArtifactDigest = "sha256:artifact-c",
            SbomDigest = "sha256:sbom-c",
            BaseArtifacts = Array.Empty<SbomBaseArtifact>()
        };

        var nodes = new[]
        {
            CreateArtifactNode("art", snapshot.ArtifactDigest, snapshot.SbomDigest),
            CreateComponentNode("comp", "pkg:npm/test@1.0.0")
        }.ToImmutableArray();

        var edges = new[]
        {
            CreateEdge("edge-1", source: "art", target: "comp")
        }.ToImmutableArray();

        var batch = new GraphBuildBatch(nodes, edges);
        var exporter = new SbomSnapshotExporter(new GraphSnapshotBuilder(), new FileSystemSnapshotFileWriter(_tempRoot));

        await exporter.ExportAsync(snapshot, batch, CancellationToken.None);

        var manifestPath = Path.Combine(_tempRoot, "manifest.json");
        var manifestJson = JsonNode.Parse(await File.ReadAllTextAsync(manifestPath))!.AsObject();

        // Hash in manifest should equal recomputed canonical hash.
        var computed = GraphIdentity.ComputeDocumentHash(manifestJson);
        Assert.Equal(computed, manifestJson["hash"]!.GetValue<string>());

        // Adjacency should contain both nodes and edges, deterministic ids.
        var adjacency = JsonNode.Parse(await File.ReadAllTextAsync(Path.Combine(_tempRoot, "adjacency.json")))!.AsObject();
        var nodesArray = adjacency["nodes"]!.AsArray();
        Assert.Equal(2, nodesArray.Count);
        Assert.Equal("art", nodesArray[0]!["node_id"]!.GetValue<string>());

        // nodes.jsonl and edges.jsonl should both exist and be non-empty.
        Assert.True(new FileInfo(Path.Combine(_tempRoot, "nodes.jsonl")).Length > 0);
        Assert.True(new FileInfo(Path.Combine(_tempRoot, "edges.jsonl")).Length > 0);
    }

    public void Dispose()
    {
        // Best-effort cleanup; a locked file must not fail the test run.
        try { Directory.Delete(_tempRoot, recursive: true); } catch { /* ignore */ }
    }

    /// <summary>Builds a JSON node document of kind "artifact" with digest attributes.</summary>
    private static JsonObject CreateArtifactNode(string id, string artifactDigest, string sbomDigest)
    {
        var attributes = new JsonObject
        {
            ["artifact_digest"] = artifactDigest,
            ["sbom_digest"] = sbomDigest
        };

        return new JsonObject
        {
            ["id"] = id,
            ["kind"] = "artifact",
            ["attributes"] = attributes
        };
    }

    /// <summary>Builds a JSON node document of kind "component" keyed by purl.</summary>
    private static JsonObject CreateComponentNode(string id, string purl)
    {
        var attributes = new JsonObject
        {
            ["purl"] = purl
        };

        return new JsonObject
        {
            ["id"] = id,
            ["kind"] = "component",
            ["attributes"] = attributes
        };
    }

    /// <summary>Builds a JSON edge document connecting source to target.</summary>
    private static JsonObject CreateEdge(string id, string source, string target)
    {
        return new JsonObject
        {
            ["id"] = id,
            ["source"] = source,
            ["target"] = target
        };
    }
}
|
||||
@@ -0,0 +1,13 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
  </PropertyGroup>

  <!-- NOTE(review): no xunit/Microsoft.NET.Test.Sdk PackageReference is visible
       here although the tests use [Fact]; presumably these come from a shared
       Directory.Build.props / Directory.Packages.props - confirm. -->
  <ItemGroup>
    <ProjectReference Include="../../StellaOps.Graph.Indexer/StellaOps.Graph.Indexer.csproj" />
  </ItemGroup>
</Project>
|
||||
@@ -0,0 +1,89 @@
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Notifier.Worker.Options;
|
||||
using StellaOps.Notifier.Worker.Processing;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Notifier.Tests;
|
||||
|
||||
/// <summary>
/// Verifies <c>HttpEgressSloSink</c>: publishing is a no-op when no webhook is
/// configured, and otherwise issues a single POST to the configured endpoint.
/// </summary>
public class HttpEgressSloSinkTests
{
    [Fact]
    public async Task PublishAsync_NoWebhook_DoesNothing()
    {
        var handler = new StubHandler();
        var sink = CreateSink(handler, new EgressSloOptions { Webhook = null });

        await sink.PublishAsync(BuildContext(), CancellationToken.None);

        // No webhook configured => no HTTP traffic at all.
        Assert.Equal(0, handler.SendCount);
    }

    [Fact]
    public async Task PublishAsync_SendsWebhookWithPayload()
    {
        var handler = new StubHandler();
        var sink = CreateSink(handler, new EgressSloOptions { Webhook = "https://example.test/slo", TimeoutSeconds = 5 });

        await sink.PublishAsync(BuildContext(), CancellationToken.None);

        Assert.Equal(1, handler.SendCount);
        var request = handler.LastRequest!;
        Assert.Equal(HttpMethod.Post, request.Method);
        Assert.Equal("https://example.test/slo", request.RequestUri!.ToString());
    }

    /// <summary>Builds a sink wired to the stub handler and given options.</summary>
    private static HttpEgressSloSink CreateSink(HttpMessageHandler handler, EgressSloOptions options)
    {
        var factory = new StubHttpClientFactory(handler);
        return new HttpEgressSloSink(factory, Options.Create(options), NullLogger<HttpEgressSloSink>.Instance);
    }

    /// <summary>Builds an SLO context derived from a minimal notify event with one delivery.</summary>
    private static EgressSloContext BuildContext()
    {
        var evt = Notify.Models.NotifyEvent.Create(
            Guid.NewGuid(),
            kind: "policy.violation",
            tenant: "tenant-a",
            ts: DateTimeOffset.UtcNow,
            payload: new System.Text.Json.Nodes.JsonObject(),
            actor: "tester",
            version: "1");

        var ctx = EgressSloContext.FromNotifyEvent(evt);
        ctx.AddDelivery("Slack", "tmpl", evt.Kind);
        return ctx;
    }

    /// <summary>Records every request and always answers 200 OK.</summary>
    private sealed class StubHandler : HttpMessageHandler
    {
        public int SendCount { get; private set; }
        public HttpRequestMessage? LastRequest { get; private set; }

        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            SendCount++;
            LastRequest = request;
            return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK));
        }
    }

    /// <summary>IHttpClientFactory that always hands out clients over the stub handler.</summary>
    private sealed class StubHttpClientFactory : IHttpClientFactory
    {
        private readonly HttpMessageHandler _handler;

        public StubHttpClientFactory(HttpMessageHandler handler)
        {
            _handler = handler;
        }

        public HttpClient CreateClient(string name)
        {
            // disposeHandler: false — the handler is shared across created clients.
            return new HttpClient(_handler, disposeHandler: false);
        }
    }
}
|
||||
@@ -0,0 +1,20 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Notifier.Worker.Processing;
|
||||
|
||||
namespace StellaOps.Notifier.Tests.Support;
|
||||
|
||||
/// <summary>
/// In-memory <see cref="IEgressSloSink"/> used by tests to capture every
/// published context for later inspection.
/// </summary>
public sealed class TestEgressSloSink : IEgressSloSink
{
    private readonly ConcurrentBag<EgressSloContext> _captured = new();

    /// <summary>All contexts published so far (unordered).</summary>
    public IReadOnlyCollection<EgressSloContext> Contexts => _captured;

    /// <summary>Stores the context and completes synchronously.</summary>
    public Task PublishAsync(EgressSloContext context, CancellationToken cancellationToken)
    {
        _captured.Add(context);
        return Task.CompletedTask;
    }
}
|
||||
@@ -0,0 +1,16 @@
|
||||
namespace StellaOps.Notifier.Worker.Options;
|
||||
|
||||
/// <summary>
/// Options controlling the egress SLO webhook sink.
/// </summary>
public sealed class EgressSloOptions
{
    /// <summary>
    /// Webhook endpoint to receive SLO delivery signals. When null/empty, publishing is disabled.
    /// </summary>
    public string? Webhook { get; set; }

    /// <summary>
    /// Request timeout in seconds for the webhook call.
    /// </summary>
    public int TimeoutSeconds { get; set; } = 5;

    /// <summary>
    /// True when a non-whitespace webhook endpoint is configured.
    /// </summary>
    public bool Enabled => !string.IsNullOrWhiteSpace(Webhook);
}
|
||||
@@ -0,0 +1,45 @@
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Notify.Models;
|
||||
|
||||
namespace StellaOps.Notifier.Worker.Processing;
|
||||
|
||||
/// <summary>
/// Tracks per-event delivery intents for SLO evaluation and webhook emission.
/// </summary>
internal sealed class EgressSloContext
{
    private readonly List<EgressSloSignal> _signals = new();

    /// <summary>Delivery signals recorded so far, in insertion order.</summary>
    public IReadOnlyList<EgressSloSignal> Signals => _signals;

    public Guid EventId { get; private set; }

    public string TenantId { get; private set; } = string.Empty;

    public string EventKind { get; private set; } = string.Empty;

    public DateTimeOffset OccurredAt { get; private set; }

    /// <summary>Builds a context seeded from the source notify event.</summary>
    public static EgressSloContext FromNotifyEvent(NotifyEvent notifyEvent)
    {
        ArgumentNullException.ThrowIfNull(notifyEvent);

        var context = new EgressSloContext();
        context.EventId = notifyEvent.EventId;
        context.TenantId = notifyEvent.Tenant;
        context.EventKind = notifyEvent.Kind;
        context.OccurredAt = notifyEvent.Ts;
        return context;
    }

    /// <summary>Records one delivery intent, stamped with the event's occurrence time.</summary>
    public void AddDelivery(string channelType, string template, string kind)
        => _signals.Add(new EgressSloSignal(channelType, template, kind, OccurredAt));
}
|
||||
|
||||
/// <summary>
/// Immutable delivery signal captured for SLO evaluation: the channel type,
/// template identifier, event kind, and the source event's occurrence time.
/// </summary>
internal sealed record EgressSloSignal(
    string Channel,
    string Template,
    string Kind,
    DateTimeOffset OccurredAt);
|
||||
@@ -0,0 +1,93 @@
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Notifier.Worker.Options;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Notifier.Worker.Processing;
|
||||
|
||||
/// <summary>
/// Publishes egress SLO contexts to a configured HTTP webhook endpoint.
/// Publishing is best-effort: transport failures and per-request timeouts are
/// logged as warnings and swallowed, but caller-requested cancellation is
/// propagated.
/// </summary>
internal sealed class HttpEgressSloSink : IEgressSloSink
{
    private readonly IHttpClientFactory _clientFactory;
    private readonly EgressSloOptions _options;
    private readonly ILogger<HttpEgressSloSink> _logger;

    public HttpEgressSloSink(
        IHttpClientFactory clientFactory,
        IOptions<EgressSloOptions> options,
        ILogger<HttpEgressSloSink> logger)
    {
        _clientFactory = clientFactory ?? throw new ArgumentNullException(nameof(clientFactory));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// POSTs the context as JSON to the configured webhook. No-op when no
    /// webhook is configured.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="context"/> is null.</exception>
    /// <exception cref="OperationCanceledException">When <paramref name="cancellationToken"/> is cancelled by the caller.</exception>
    public async Task PublishAsync(EgressSloContext context, CancellationToken cancellationToken)
    {
        if (context is null)
        {
            throw new ArgumentNullException(nameof(context));
        }

        if (!_options.Enabled)
        {
            return;
        }

        // Linked token layers the configured per-request timeout on top of the
        // caller's token.
        using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        if (_options.TimeoutSeconds > 0)
        {
            linkedCts.CancelAfter(TimeSpan.FromSeconds(_options.TimeoutSeconds));
        }

        try
        {
            var client = _clientFactory.CreateClient("notifier-slo-webhook");
            var payload = Map(context);

            var response = await client.PostAsJsonAsync(_options.Webhook, payload, linkedCts.Token).ConfigureAwait(false);
            if (!response.IsSuccessStatusCode)
            {
                _logger.LogWarning(
                    "SLO webhook returned non-success status {StatusCode} for event {EventId} (tenant {TenantId}).",
                    (int)response.StatusCode,
                    context.EventId,
                    context.TenantId);
            }
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Fix: the caller cancelled. The linked token is cancelled in both
            // the timeout and the caller-cancel case, so filtering on the linked
            // token alone mislabelled cooperative shutdown as a webhook timeout
            // and swallowed it. Propagate instead.
            throw;
        }
        catch (OperationCanceledException) when (linkedCts.IsCancellationRequested)
        {
            _logger.LogWarning(
                "SLO webhook timed out after {TimeoutSeconds}s for event {EventId} (tenant {TenantId}).",
                _options.TimeoutSeconds,
                context.EventId,
                context.TenantId);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(
                ex,
                "Failed to publish SLO webhook for event {EventId} (tenant {TenantId}).",
                context.EventId,
                context.TenantId);
        }
    }

    // Projects the context into an anonymous payload; shape mirrors the
    // context's public properties plus one entry per delivery signal.
    private static object Map(EgressSloContext context)
        => new
        {
            context.EventId,
            context.TenantId,
            context.EventKind,
            context.OccurredAt,
            deliveries = context.Signals.Select(signal => new
            {
                signal.Channel,
                signal.Template,
                signal.Kind,
                signal.OccurredAt
            })
        };
}
|
||||
@@ -0,0 +1,17 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Notifier.Worker.Processing;
|
||||
|
||||
/// <summary>
/// Destination for egress SLO contexts produced while processing notify events.
/// </summary>
internal interface IEgressSloSink
{
    /// <summary>Publishes the supplied context; implementations choose the transport.</summary>
    Task PublishAsync(EgressSloContext context, CancellationToken cancellationToken);
}

/// <summary>
/// No-op sink used when SLO publishing is disabled.
/// </summary>
internal sealed class NullEgressSloSink : IEgressSloSink
{
    /// <inheritdoc />
    public Task PublishAsync(EgressSloContext context, CancellationToken cancellationToken)
        => Task.CompletedTask;
}
|
||||
@@ -0,0 +1,39 @@
|
||||
# QA Playbook — Attestation Routing (NOTIFY-ATTEST-74-002)
|
||||
|
||||
## Goal
|
||||
Verify attestation-related notification flows using the sample rules shipped in `docs/attestation-rules.sample.json`.
|
||||
|
||||
## Prereqs
|
||||
- Notifier WebService + Worker running against a QA tenant.
|
||||
- Channels configured for:
|
||||
- `email-kms` (SMTP bridge)
|
||||
- `webhook-kms` (internal hook)
|
||||
- `slack-soc` (Slack webhook)
|
||||
- `webhook-siem`
|
||||
- Templates pre-seeded from `offline/notifier/templates/attestation/*.json`.
|
||||
|
||||
## Steps
|
||||
1. Import rules and channels
|
||||
- `POST /api/v1/notify/rules:batch` with `docs/attestation-rules.sample.json` (replace `<tenant-id>`).
|
||||
- Verify rules are enabled.
|
||||
2. Emit events
|
||||
- Rotation: emit `authority.keys.rotated` with signer metadata and impacted tenants.
|
||||
- Revocation: `authority.keys.revoked`.
|
||||
- Transparency anomaly: `attestor.transparency.anomaly` and `attestor.transparency.witness.failed`.
|
||||
3. Validate deliveries
|
||||
- Confirm email + webhook for rotation/revocation (template `tmpl-attest-key-rotation`).
|
||||
- Confirm slack + webhook for transparency anomaly (template `tmpl-attest-transparency-anomaly`).
|
||||
- Check ledger/DB for rendered payloads with template keys and tenant id.
|
||||
4. Negative checks
|
||||
- Disabled channel should suppress delivery.
|
||||
- Missing template should surface as rule error.
|
||||
|
||||
## Evidence to capture
|
||||
- API responses for rule import and event POST.
|
||||
- Delivery records (IDs, channel, template key) per event.
|
||||
- Slack/email/webhook payload excerpts (hash or screenshot acceptable).
|
||||
|
||||
## Completion criteria
|
||||
- All four event kinds produce expected channels per sample rules without errors.
|
||||
- Ledger shows template IDs `tmpl-attest-key-rotation` and `tmpl-attest-transparency-anomaly`.
|
||||
- Failures (if any) documented with event payload and channel.
|
||||
@@ -0,0 +1,86 @@
|
||||
{
|
||||
"rules": [
|
||||
{
|
||||
"ruleId": "attest-key-rotation",
|
||||
"name": "Attestation key rotation/revocation",
|
||||
"enabled": true,
|
||||
"tenantId": "<tenant-id>",
|
||||
"match": {
|
||||
"eventKinds": [
|
||||
"authority.keys.rotated",
|
||||
"authority.keys.revoked"
|
||||
]
|
||||
},
|
||||
"actions": [
|
||||
{
|
||||
"actionId": "email-kms",
|
||||
"enabled": true,
|
||||
"channel": "email-kms",
|
||||
"template": "tmpl-attest-key-rotation"
|
||||
},
|
||||
{
|
||||
"actionId": "webhook-kms",
|
||||
"enabled": true,
|
||||
"channel": "webhook-kms",
|
||||
"template": "tmpl-attest-key-rotation"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"ruleId": "attest-transparency-anomaly",
|
||||
"name": "Transparency witness anomaly",
|
||||
"enabled": true,
|
||||
"tenantId": "<tenant-id>",
|
||||
"match": {
|
||||
"eventKinds": [
|
||||
"attestor.transparency.anomaly",
|
||||
"attestor.transparency.witness.failed"
|
||||
]
|
||||
},
|
||||
"actions": [
|
||||
{
|
||||
"actionId": "slack-soc",
|
||||
"enabled": true,
|
||||
"channel": "slack-soc",
|
||||
"template": "tmpl-attest-transparency-anomaly"
|
||||
},
|
||||
{
|
||||
"actionId": "webhook-siem",
|
||||
"enabled": true,
|
||||
"channel": "webhook-siem",
|
||||
"template": "tmpl-attest-transparency-anomaly"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"channels": [
|
||||
{
|
||||
"channelId": "email-kms",
|
||||
"type": "email",
|
||||
"name": "KMS security",
|
||||
"target": "kms-security@example.com",
|
||||
"secretRef": "ref://notify/channels/email/kms-security"
|
||||
},
|
||||
{
|
||||
"channelId": "webhook-kms",
|
||||
"type": "webhook",
|
||||
"name": "KMS webhook",
|
||||
"endpoint": "https://hooks.internal/kms",
|
||||
"secretRef": "ref://notify/channels/webhook/kms"
|
||||
},
|
||||
{
|
||||
"channelId": "slack-soc",
|
||||
"type": "slack",
|
||||
"name": "SOC high-priority",
|
||||
"endpoint": "https://hooks.slack.com/services/T000/B000/XYZ",
|
||||
"secretRef": "ref://notify/channels/slack/soc"
|
||||
},
|
||||
{
|
||||
"channelId": "webhook-siem",
|
||||
"type": "webhook",
|
||||
"name": "SIEM ingest",
|
||||
"endpoint": "https://siem.example.internal/hooks/notifier",
|
||||
"secretRef": "ref://notify/channels/webhook/siem"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
#nullable enable
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Policy.AuthSignals;
|
||||
|
||||
/// <summary>Reference to a piece of supporting evidence, identified by URI and digest.</summary>
public record EvidenceRef
{
    public string Kind { get; init; } = string.Empty; // linkset|runtime|attestation|bundle
    public string Uri { get; init; } = string.Empty;
    public string Digest { get; init; } = string.Empty;
    // Optional narrowing of the evidence (e.g. a sub-path) — nullable when the whole artifact applies.
    public string? Scope { get; init; }
}

/// <summary>Provenance metadata describing how a signal was produced and signed.</summary>
public record Provenance
{
    public string? Pipeline { get; init; }
    public IReadOnlyList<string>? Inputs { get; init; }
    public string? Signer { get; init; }
    public Transparency? Transparency { get; init; }
}

/// <summary>Transparency-log details; SkipReason explains why no Rekor entry exists.</summary>
public record Transparency
{
    public string? RekorUuid { get; init; }
    public string? SkipReason { get; init; }
}

/// <summary>
/// Advisory signal shared between Policy/Authority/Signals components.
/// NOTE(review): Created is a DateTime with no explicit Kind — presumably UTC;
/// confirm with producers before relying on it.
/// </summary>
public record PolicyAuthSignal
{
    public string Id { get; init; } = string.Empty;
    public string Tenant { get; init; } = string.Empty;
    public string Subject { get; init; } = string.Empty;
    public string SignalType { get; init; } = string.Empty; // reachability|attestation|risk|vex
    public string Source { get; init; } = string.Empty;
    public double? Confidence { get; init; }
    public IReadOnlyList<EvidenceRef> Evidence { get; init; } = Array.Empty<EvidenceRef>();
    public Provenance? Provenance { get; init; }
    public DateTime Created { get; init; }
}
|
||||
@@ -0,0 +1,14 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
|
||||
<PackageId>StellaOps.Policy.AuthSignals</PackageId>
|
||||
<Authors>StellaOps</Authors>
|
||||
<Company>StellaOps</Company>
|
||||
<Version>0.1.0-alpha</Version>
|
||||
<Description>Shared Policy/Authority/Signals contracts for advisory signals.</Description>
|
||||
<PackageOutputPath>../../../../local-nugets</PackageOutputPath>
|
||||
<IncludeSymbols>false</IncludeSymbols>
|
||||
<IncludeSource>false</IncludeSource>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,120 @@
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Analyzers.Lang;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Deno.Internal.Runtime;
|
||||
|
||||
/// <summary>
/// Optional harness that executes the emitted Deno runtime shim when an entrypoint is provided via environment variable.
/// This keeps runtime capture opt-in and offline-friendly.
/// </summary>
internal static class DenoRuntimeTraceRunner
{
    private const string EntrypointEnvVar = "STELLA_DENO_ENTRYPOINT";
    private const string BinaryEnvVar = "STELLA_DENO_BINARY";
    private const string RuntimeFileName = "deno-runtime.ndjson";

    /// <summary>
    /// Runs the Deno shim against the configured entrypoint and reports whether
    /// a runtime trace file was produced. Returns false (with a log entry) on
    /// any missing prerequisite or process failure; only cancellation is rethrown.
    /// </summary>
    public static async Task<bool> TryExecuteAsync(
        LanguageAnalyzerContext context,
        ILogger? logger,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        var entrypoint = Environment.GetEnvironmentVariable(EntrypointEnvVar);
        if (string.IsNullOrWhiteSpace(entrypoint))
        {
            logger?.LogDebug("Deno runtime trace skipped: {EnvVar} not set", EntrypointEnvVar);
            return false;
        }

        var entrypointPath = Path.GetFullPath(Path.Combine(context.RootPath, entrypoint));
        if (!File.Exists(entrypointPath))
        {
            logger?.LogWarning("Deno runtime trace skipped: entrypoint '{Entrypoint}' missing", entrypointPath);
            return false;
        }

        // Emit the shim if a previous step has not already written it.
        var shimPath = Path.Combine(context.RootPath, DenoRuntimeShim.FileName);
        if (!File.Exists(shimPath))
        {
            await DenoRuntimeShim.WriteAsync(context.RootPath, cancellationToken).ConfigureAwait(false);
        }

        var binary = Environment.GetEnvironmentVariable(BinaryEnvVar);
        if (string.IsNullOrWhiteSpace(binary))
        {
            binary = "deno";
        }

        var startInfo = new ProcessStartInfo
        {
            FileName = binary,
            WorkingDirectory = context.RootPath,
            RedirectStandardError = true,
            RedirectStandardOutput = true,
            UseShellExecute = false,
        };

        startInfo.ArgumentList.Add("run");
        startInfo.ArgumentList.Add("--cached-only"); // offline-friendly: never hit the network
        startInfo.ArgumentList.Add("--allow-read");
        startInfo.ArgumentList.Add("--allow-env");
        startInfo.ArgumentList.Add("--quiet");
        startInfo.ArgumentList.Add(shimPath);

        startInfo.Environment[EntrypointEnvVar] = entrypointPath;

        try
        {
            using var process = Process.Start(startInfo);
            if (process is null)
            {
                logger?.LogWarning("Deno runtime trace skipped: failed to start 'deno' process");
                return false;
            }

            // Fix: drain both redirected pipes while the process runs. The
            // original never read stdout and read stderr only after exit, so a
            // child that fills either pipe buffer blocks on write and
            // WaitForExitAsync deadlocks.
            var stdoutTask = process.StandardOutput.ReadToEndAsync();
            var stderrTask = process.StandardError.ReadToEndAsync();

            await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);

            await stdoutTask.ConfigureAwait(false); // drained; content unused
            var stderr = await stderrTask.ConfigureAwait(false);

            if (process.ExitCode != 0)
            {
                logger?.LogWarning(
                    "Deno runtime trace failed with exit code {ExitCode}. stderr: {Error}",
                    process.ExitCode,
                    Truncate(stderr));
                return false;
            }
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            logger?.LogWarning(ex, "Deno runtime trace skipped: {Message}", ex.Message);
            return false;
        }

        var runtimePath = Path.Combine(context.RootPath, RuntimeFileName);
        if (!File.Exists(runtimePath))
        {
            logger?.LogWarning(
                "Deno runtime trace finished but did not emit {RuntimeFile}",
                RuntimeFileName);
            return false;
        }

        return true;
    }

    // Clamps diagnostic text to a bounded length for log hygiene.
    private static string Truncate(string? value, int maxLength = 400)
    {
        if (string.IsNullOrEmpty(value))
        {
            return string.Empty;
        }

        return value.Length <= maxLength ? value : value[..maxLength];
    }
}
|
||||
@@ -0,0 +1,33 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
/// <summary>
/// Canonical description of a package entrypoint (bin/main/module target)
/// together with the normalized export-condition set that selected it.
/// </summary>
internal sealed record NodeEntrypoint(
    string Path,
    string? BinName,
    string? MainField,
    string? ModuleField,
    string ConditionSet)
{
    /// <summary>
    /// Builds an entrypoint, normalizing the condition list into a stable
    /// comma-joined, ordinally sorted, de-duplicated string.
    /// </summary>
    public static NodeEntrypoint Create(string path, string? binName, string? mainField, string? moduleField, IEnumerable<string>? conditions)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        return new NodeEntrypoint(path, binName, mainField, moduleField, NormalizeConditions(conditions));
    }

    // Trims, de-duplicates, and ordinally sorts the conditions; null/empty input -> "".
    private static string NormalizeConditions(IEnumerable<string>? conditions)
    {
        if (conditions is null)
        {
            return string.Empty;
        }

        // SortedSet gives dedup + ordinal ordering in one pass.
        var normalized = new SortedSet<string>(StringComparer.Ordinal);
        foreach (var condition in conditions)
        {
            if (!string.IsNullOrWhiteSpace(condition))
            {
                normalized.Add(condition.Trim());
            }
        }

        return string.Join(',', normalized);
    }
}
|
||||
@@ -0,0 +1,10 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
/// <summary>
/// A single module dependency edge discovered in a source file. Kind is one of
/// "import", "require", or "import()" (see NodeImportWalker); Evidence carries
/// the serialized source location.
/// </summary>
internal sealed record NodeImportEdge(
    string SourceFile,
    string TargetSpecifier,
    string Kind,
    string Evidence)
{
    // Stable identity used for deduplication and deterministic ordering;
    // Evidence is deliberately excluded from the key.
    public string ComparisonKey => string.Concat(SourceFile, "|", TargetSpecifier, "|", Kind);
}
|
||||
@@ -0,0 +1,91 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
using Esprima;
|
||||
using Esprima.Ast;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
// Static-analysis walker that extracts module dependency edges (static import,
// require(), dynamic import()) from JavaScript source text via the Esprima AST.
internal static class NodeImportWalker
{
    // Parses the file and returns edges ordered by ComparisonKey for
    // deterministic output. Null content or a parse failure yields an empty list.
    // NOTE(review): ParseScript() is used even though ImportDeclaration nodes are
    // module-only syntax — confirm Tolerant = true accepts ESM input, otherwise
    // ParseModule() may be required for .mjs/ESM files.
    public static IReadOnlyList<NodeImportEdge> AnalyzeImports(string sourcePath, string content)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sourcePath);
        if (content is null)
        {
            return Array.Empty<NodeImportEdge>();
        }

        Script script;
        try
        {
            script = new JavaScriptParser(content, new ParserOptions
            {
                Tolerant = true,
                AdaptRegexp = true,
                Source = sourcePath
            }).ParseScript();
        }
        catch (ParserException)
        {
            // Unparseable source is treated as having no imports.
            return Array.Empty<NodeImportEdge>();
        }

        var edges = new List<NodeImportEdge>();
        Walk(script, sourcePath, edges);
        return edges.Count == 0
            ? Array.Empty<NodeImportEdge>()
            : edges.OrderBy(e => e.ComparisonKey, StringComparer.Ordinal).ToArray();
    }

    // Depth-first AST traversal collecting the three edge kinds.
    private static void Walk(Node node, string sourcePath, List<NodeImportEdge> edges)
    {
        switch (node)
        {
            case ImportDeclaration importDecl when !string.IsNullOrWhiteSpace(importDecl.Source?.StringValue):
                edges.Add(new NodeImportEdge(sourcePath, importDecl.Source.StringValue!, "import", BuildEvidence(importDecl.Loc)));
                break;
            case CallExpression call when IsRequire(call) && call.Arguments.FirstOrDefault() is Literal { Value: string target }:
                edges.Add(new NodeImportEdge(sourcePath, target, "require", BuildEvidence(call.Loc)));
                break;
            case ImportExpression importExp when importExp.Source is Literal { Value: string importTarget }:
                edges.Add(new NodeImportEdge(sourcePath, importTarget, "import()", BuildEvidence(importExp.Loc)));
                break;
        }

        foreach (var child in node.ChildNodes)
        {
            Walk(child, sourcePath, edges);
        }
    }

    // True only for a bare `require(<single string literal>)` call.
    private static bool IsRequire(CallExpression call)
    {
        return call.Callee is Identifier id && string.Equals(id.Name, "require", StringComparison.Ordinal)
            && call.Arguments.Count == 1 && call.Arguments[0] is Literal { Value: string };
    }

    // Serializes the AST node location as compact JSON: {"start":{line,column},"end":{...}}.
    private static string BuildEvidence(Location? loc)
    {
        if (loc is null)
        {
            return string.Empty;
        }

        var json = new JsonObject
        {
            ["start"] = BuildPosition(loc.Start),
            ["end"] = BuildPosition(loc.End)
        };

        return json.ToJsonString(new JsonSerializerOptions { WriteIndented = false });
    }

    private static JsonObject BuildPosition(Position pos)
    {
        return new JsonObject
        {
            ["line"] = pos.Line,
            ["column"] = pos.Column
        };
    }
}
|
||||
@@ -1,4 +1,6 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
internal sealed class NodePackage
|
||||
{
|
||||
@@ -80,6 +82,12 @@ internal sealed class NodePackage
|
||||
|
||||
public bool IsYarnPnp { get; }
|
||||
|
||||
private readonly List<NodeEntrypoint> _entrypoints = new();
|
||||
private readonly List<NodeImportEdge> _imports = new();
|
||||
|
||||
public IReadOnlyList<NodeEntrypoint> Entrypoints => _entrypoints;
|
||||
public IReadOnlyList<NodeImportEdge> Imports => _imports;
|
||||
|
||||
public string RelativePathNormalized => string.IsNullOrEmpty(RelativePath) ? string.Empty : RelativePath.Replace(Path.DirectorySeparatorChar, '/');
|
||||
|
||||
public string ComponentKey => $"purl::{Purl}";
|
||||
@@ -113,10 +121,43 @@ internal sealed class NodePackage
|
||||
LanguageEvidenceKind.Metadata,
|
||||
"package.json:scripts",
|
||||
locator,
|
||||
script.Command,
|
||||
script.Sha256));
|
||||
}
|
||||
|
||||
script.Command,
|
||||
script.Sha256));
|
||||
}
|
||||
|
||||
foreach (var entrypoint in _entrypoints)
|
||||
{
|
||||
var locator = string.IsNullOrEmpty(PackageJsonLocator)
|
||||
? "package.json#entrypoint"
|
||||
: $"{PackageJsonLocator}#entrypoint";
|
||||
|
||||
var content = string.Join(';', new[]
|
||||
{
|
||||
entrypoint.Path,
|
||||
entrypoint.BinName,
|
||||
entrypoint.MainField,
|
||||
entrypoint.ModuleField,
|
||||
entrypoint.ConditionSet
|
||||
}.Where(static v => !string.IsNullOrWhiteSpace(v)));
|
||||
|
||||
evidence.Add(new LanguageComponentEvidence(
|
||||
LanguageEvidenceKind.Metadata,
|
||||
"package.json:entrypoint",
|
||||
locator,
|
||||
content,
|
||||
sha256: null));
|
||||
}
|
||||
|
||||
foreach (var importEdge in _imports.OrderBy(static e => e.ComparisonKey, StringComparer.Ordinal))
|
||||
{
|
||||
evidence.Add(new LanguageComponentEvidence(
|
||||
LanguageEvidenceKind.Source,
|
||||
"node.import",
|
||||
importEdge.SourceFile,
|
||||
importEdge.TargetSpecifier,
|
||||
sha256: null));
|
||||
}
|
||||
|
||||
return evidence
|
||||
.OrderBy(static e => e.ComparisonKey, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
@@ -186,6 +227,33 @@ internal sealed class NodePackage
|
||||
}
|
||||
}
|
||||
|
||||
if (_entrypoints.Count > 0)
|
||||
{
|
||||
var paths = _entrypoints
|
||||
.Select(static ep => ep.Path)
|
||||
.OrderBy(static p => p, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
entries.Add(new KeyValuePair<string, string?>("entrypoint", string.Join(';', paths)));
|
||||
|
||||
var conditionSets = _entrypoints
|
||||
.Select(static ep => ep.ConditionSet)
|
||||
.Where(static cs => !string.IsNullOrWhiteSpace(cs))
|
||||
.Distinct(StringComparer.Ordinal)
|
||||
.OrderBy(static cs => cs, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
if (conditionSets.Length > 0)
|
||||
{
|
||||
entries.Add(new KeyValuePair<string, string?>("entrypoint.conditions", string.Join(';', conditionSets)));
|
||||
}
|
||||
}
|
||||
|
||||
if (_imports.Count > 0)
|
||||
{
|
||||
entries.Add(new KeyValuePair<string, string?>("imports", _imports.Count.ToString(CultureInfo.InvariantCulture)));
|
||||
}
|
||||
|
||||
if (HasInstallScripts)
|
||||
{
|
||||
entries.Add(new KeyValuePair<string, string?>("installScripts", "true"));
|
||||
@@ -230,6 +298,48 @@ internal sealed class NodePackage
|
||||
.OrderBy(static pair => pair.Key, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
public void AddEntrypoint(string path, string conditionSet, string? binName, string? mainField, string? moduleField)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(path))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var entry = NodeEntrypoint.Create(path.Replace(Path.DirectorySeparatorChar, '/'), binName, mainField, moduleField, ParseConditionSet(conditionSet));
|
||||
if (_entrypoints.Any(ep => string.Equals(ep.Path, entry.Path, StringComparison.Ordinal)))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_entrypoints.Add(entry);
|
||||
}
|
||||
|
||||
public void AddImport(string sourceFile, string targetSpecifier, string kind, string evidence)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(sourceFile) || string.IsNullOrWhiteSpace(targetSpecifier))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var edge = new NodeImportEdge(sourceFile.Replace(Path.DirectorySeparatorChar, '/'), targetSpecifier.Trim(), kind.Trim(), evidence);
|
||||
if (_imports.Any(e => string.Equals(e.ComparisonKey, edge.ComparisonKey, StringComparison.Ordinal)))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_imports.Add(edge);
|
||||
}
|
||||
|
||||
private static IEnumerable<string> ParseConditionSet(string conditionSet)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(conditionSet))
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
return conditionSet.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
|
||||
}
|
||||
|
||||
private static string BuildPurl(string name, string version)
|
||||
{
|
||||
|
||||
@@ -61,9 +61,65 @@ internal static class NodePackageCollector
|
||||
|
||||
AppendDeclaredPackages(packages, lockData);
|
||||
|
||||
AttachImports(context, packages, cancellationToken);
|
||||
|
||||
return packages;
|
||||
}
|
||||
|
||||
// Scans each package's on-disk source tree and attaches discovered import
// edges. Best-effort: missing roots and unreadable files are skipped.
private static void AttachImports(LanguageAnalyzerContext context, List<NodePackage> packages, CancellationToken cancellationToken)
{
    foreach (var package in packages)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var packageRoot = string.IsNullOrEmpty(package.RelativePathNormalized)
            ? context.RootPath
            : Path.Combine(context.RootPath, package.RelativePathNormalized.Replace('/', Path.DirectorySeparatorChar));

        if (!Directory.Exists(packageRoot))
        {
            continue;
        }

        foreach (var file in EnumerateSourceFiles(packageRoot))
        {
            cancellationToken.ThrowIfCancellationRequested();

            string content;
            try
            {
                content = File.ReadAllText(file);
            }
            catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
            {
                // Fix: also swallow UnauthorizedAccessException — enumeration
                // uses IgnoreInaccessible, but ReadAllText on a file without
                // read permission previously crashed the whole scan.
                continue;
            }

            // Edge source paths are repo-relative with '/' separators for determinism.
            var imports = NodeImportWalker.AnalyzeImports(context.GetRelativePath(file).Replace(Path.DirectorySeparatorChar, '/'), content);
            foreach (var edge in imports)
            {
                package.AddImport(edge.SourceFile, edge.TargetSpecifier, edge.Kind, edge.Evidence);
            }
        }
    }
}
|
||||
|
||||
// Yields every JS/TS source file under `root`.
// Fix: the original performed one full recursive traversal per extension
// (6x tree walks); this walks the tree once and filters by extension,
// preserving the case-insensitive matching of the original glob.
private static IEnumerable<string> EnumerateSourceFiles(string root)
{
    var extensions = new[] { ".js", ".jsx", ".mjs", ".cjs", ".ts", ".tsx" };

    var options = new EnumerationOptions
    {
        RecurseSubdirectories = true,
        IgnoreInaccessible = true
    };

    foreach (var file in Directory.EnumerateFiles(root, "*", options))
    {
        var extension = Path.GetExtension(file);
        if (extensions.Any(e => string.Equals(e, extension, StringComparison.OrdinalIgnoreCase)))
        {
            yield return file;
        }
    }
}
|
||||
|
||||
private static void TraverseDirectory(
|
||||
LanguageAnalyzerContext context,
|
||||
string directory,
|
||||
|
||||
@@ -0,0 +1,93 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Reachability;
|
||||
|
||||
/// <summary>Lattice states for vulnerability reachability.</summary>
public enum ReachabilityState
{
    Unknown = 0,
    Conditional = 1,
    Reachable = 2,
    Unreachable = 3
}

/// <summary>Kinds of evidence that feed the reachability lattice.</summary>
public enum ReachabilityEvidenceKind
{
    StaticPath,
    RuntimeHit,
    RuntimeSinkHit,
    Guard,
    Mitigation
}

/// <summary>One evidence item: its kind plus an optional reference string.</summary>
public readonly record struct ReachabilityEvidence(
    ReachabilityEvidenceKind Kind,
    string? Reference = null);

/// <summary>Outcome of a lattice evaluation: final state and a [0,1] score.</summary>
public sealed record ReachabilityLatticeResult(
    ReachabilityState State,
    double Score);

public static class ReachabilityLattice
{
    /// <summary>
    /// Folds evidence items into a deterministic reachability state and score.
    /// Evidence is canonically ordered first so the result is independent of
    /// input order; unknown enum values are dropped.
    /// </summary>
    public static ReachabilityLatticeResult Evaluate(IEnumerable<ReachabilityEvidence> rawEvidence)
    {
        var ordered = rawEvidence
            .Where(item => Enum.IsDefined(typeof(ReachabilityEvidenceKind), item.Kind))
            .OrderBy(item => item.Kind)
            .ThenBy(item => item.Reference ?? string.Empty, StringComparer.Ordinal)
            .ToList();

        var sinkHit = ordered.Any(item => item.Kind is ReachabilityEvidenceKind.RuntimeSinkHit);
        var runtimeHit = ordered.Any(item => item.Kind is ReachabilityEvidenceKind.RuntimeHit or ReachabilityEvidenceKind.RuntimeSinkHit);
        var staticPath = ordered.Any(item => item.Kind is ReachabilityEvidenceKind.StaticPath);
        var guards = ordered.Count(item => item.Kind is ReachabilityEvidenceKind.Guard);
        var mitigations = ordered.Count(item => item.Kind is ReachabilityEvidenceKind.Mitigation);

        var state = ReachabilityState.Unknown;
        var score = 0.0;

        // A static path gives conditional reachability and half the score budget.
        if (staticPath)
        {
            state = ReachabilityState.Conditional;
            score += 0.50;
        }

        // A runtime hit promotes to Reachable; a sink hit adds a bonus.
        if (runtimeHit)
        {
            state = ReachabilityState.Reachable;
            score += sinkHit ? 0.40 : 0.30;
        }

        // Guards (without runtime confirmation) demote certainty one step each
        // application and shave 0.20 per guard off the score.
        if (!runtimeHit && guards > 0)
        {
            if (state == ReachabilityState.Reachable)
            {
                state = ReachabilityState.Conditional;
            }
            else if (state == ReachabilityState.Conditional)
            {
                state = ReachabilityState.Unknown;
            }

            score = Math.Max(score - 0.20 * guards, 0);
        }

        // Mitigations (without runtime confirmation) force Unreachable.
        if (!runtimeHit && mitigations > 0)
        {
            state = ReachabilityState.Unreachable;
            score = Math.Max(score - 0.30 * mitigations, 0);
        }

        // Explicit empty-evidence short-circuit (result is identical either way).
        if (state == ReachabilityState.Unknown && score <= 0 && ordered.Count == 0)
        {
            return new ReachabilityLatticeResult(ReachabilityState.Unknown, 0);
        }

        var finalScore = Math.Round(Math.Clamp(score, 0, 1), 2, MidpointRounding.AwayFromZero);
        return new ReachabilityLatticeResult(state, finalScore);
    }
}
|
||||
@@ -0,0 +1,52 @@
|
||||
using System.Text;
|
||||
using StellaOps.Scanner.Analyzers.Lang;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Deno;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Deno.Tests.TestUtilities;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Deno.Tests.Deno;
|
||||
|
||||
public sealed class DenoLanguageAnalyzerRuntimeTests
{
    /// <summary>
    /// Stages an NDJSON runtime trace in a scratch directory, runs the Deno
    /// analyzer over it, and verifies the emitted payload and surface signals.
    /// </summary>
    [Fact]
    public async Task IngestsRuntimeTraceAndEmitsSignals()
    {
        var workspace = TestPaths.CreateTemporaryDirectory();
        try
        {
            // One trace event per line: module load, permission use, wasm load, npm resolution.
            var traceEvents = new[]
            {
                "{\"type\":\"deno.module.load\",\"ts\":\"2025-11-18T00:00:00Z\",\"module\":{\"normalized\":\"app/main.ts\",\"path_sha256\":\"abc\"},\"reason\":\"dynamic-import\",\"permissions\":[\"fs\"],\"origin\":\"https://deno.land/x/std@0.208.0/http/server.ts\"}",
                "{\"type\":\"deno.permission.use\",\"ts\":\"2025-11-18T00:00:01Z\",\"permission\":\"net\",\"module\":{\"normalized\":\"app/net.ts\",\"path_sha256\":\"def\"},\"details\":\"permissions.request\"}",
                "{\"type\":\"deno.wasm.load\",\"ts\":\"2025-11-18T00:00:02Z\",\"module\":{\"normalized\":\"pkg/module.wasm\",\"path_sha256\":\"ghi\"},\"importer\":\"app/main.ts\",\"reason\":\"instantiate\"}",
                "{\"type\":\"deno.npm.resolution\",\"ts\":\"2025-11-18T00:00:03Z\",\"specifier\":\"npm:chalk@5\",\"package\":\"chalk\",\"version\":\"5.3.0\",\"resolved\":\"file:///cache/chalk\",\"exists\":true}",
            };

            var builder = new StringBuilder();
            foreach (var traceEvent in traceEvents)
            {
                builder.AppendLine(traceEvent);
            }

            var tracePath = Path.Combine(workspace, "deno-runtime.ndjson");
            await File.WriteAllTextAsync(tracePath, builder.ToString());

            var analysisStore = new ScanAnalysisStore();
            var analyzerContext = new LanguageAnalyzerContext(workspace, TimeProvider.System, usageHints: null, services: null, analysisStore: analysisStore);

            var engine = new LanguageAnalyzerEngine(new[] { new DenoLanguageAnalyzer() });
            await engine.AnalyzeAsync(analyzerContext, CancellationToken.None);

            // The runtime payload is republished verbatim under the well-known key.
            Assert.True(analysisStore.TryGet(ScanAnalysisKeys.DenoRuntimePayload, out AnalyzerObservationPayload runtimePayload));
            Assert.Equal("deno.runtime.v1", runtimePayload.Kind);
            Assert.Equal("application/x-ndjson", runtimePayload.MediaType);

            // Surface signals derived from the trace events above.
            Assert.True(analysisStore.TryGet("surface.lang.deno.permissions", out string? permissions));
            Assert.Equal("fs,net", permissions);
            Assert.True(analysisStore.TryGet("surface.lang.deno.remote_origins", out string? remoteOrigins));
            Assert.Equal("https://deno.land/x/std@0.208.0/http/server.ts", remoteOrigins);
            Assert.True(analysisStore.TryGet("surface.lang.deno.wasm_modules", out string? wasmCount));
            Assert.Equal("1", wasmCount);
            Assert.True(analysisStore.TryGet("surface.lang.deno.npm_modules", out string? npmCount));
            Assert.Equal("1", npmCount);
        }
        finally
        {
            TestPaths.SafeDelete(workspace);
        }
    }
}
|
||||
@@ -0,0 +1,93 @@
|
||||
using StellaOps.Scanner.Analyzers.Lang;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Deno.Internal.Runtime;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Deno.Tests.TestUtilities;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Deno.Tests.Deno;
|
||||
|
||||
public sealed class DenoRuntimeTraceRunnerTests
{
    /// <summary>
    /// Without the entrypoint environment variable, the runner declines and
    /// leaves no trace output behind.
    /// </summary>
    [Fact]
    public async Task ReturnsFalse_WhenEntrypointEnvMissing()
    {
        using var entryScope = new EnvironmentVariableScope("STELLA_DENO_ENTRYPOINT", null);
        var workspace = TestPaths.CreateTemporaryDirectory();

        try
        {
            var analyzerContext = new LanguageAnalyzerContext(workspace, TimeProvider.System);
            var executed = await DenoRuntimeTraceRunner.TryExecuteAsync(analyzerContext, logger: null, CancellationToken.None);

            Assert.False(executed);
            Assert.False(File.Exists(Path.Combine(workspace, "deno-runtime.ndjson")));
        }
        finally
        {
            TestPaths.SafeDelete(workspace);
        }
    }

    /// <summary>
    /// An entrypoint path that does not exist on disk causes the runner to
    /// decline without writing the shim or a trace file.
    /// </summary>
    [Fact]
    public async Task ReturnsFalse_WhenEntrypointMissing()
    {
        var workspace = TestPaths.CreateTemporaryDirectory();

        try
        {
            using var entryScope = new EnvironmentVariableScope("STELLA_DENO_ENTRYPOINT", "app/main.ts");
            var analyzerContext = new LanguageAnalyzerContext(workspace, TimeProvider.System);

            var executed = await DenoRuntimeTraceRunner.TryExecuteAsync(analyzerContext, logger: null, CancellationToken.None);

            Assert.False(executed);
            Assert.False(File.Exists(Path.Combine(workspace, DenoRuntimeShim.FileName)));
            Assert.False(File.Exists(Path.Combine(workspace, "deno-runtime.ndjson")));
        }
        finally
        {
            TestPaths.SafeDelete(workspace);
        }
    }

    /// <summary>
    /// With a valid entrypoint but an unresolvable deno binary, the runner
    /// writes the shim yet still reports failure.
    /// </summary>
    [Fact]
    public async Task ReturnsFalse_WhenDenoBinaryUnavailable()
    {
        var workspace = TestPaths.CreateTemporaryDirectory();

        try
        {
            var entrypointPath = Path.Combine(workspace, "main.ts");
            await File.WriteAllTextAsync(entrypointPath, "console.log('hi')");

            using var entryScope = new EnvironmentVariableScope("STELLA_DENO_ENTRYPOINT", entrypointPath);
            // A random name guarantees the binary cannot be resolved on PATH.
            using var binaryScope = new EnvironmentVariableScope("STELLA_DENO_BINARY", Guid.NewGuid().ToString("N"));

            var analyzerContext = new LanguageAnalyzerContext(workspace, TimeProvider.System);
            var executed = await DenoRuntimeTraceRunner.TryExecuteAsync(analyzerContext, logger: null, CancellationToken.None);

            Assert.False(executed);
            Assert.True(File.Exists(Path.Combine(workspace, DenoRuntimeShim.FileName)));
        }
        finally
        {
            TestPaths.SafeDelete(workspace);
        }
    }

    /// <summary>
    /// Temporarily overrides an environment variable and restores the prior
    /// value (including "unset") on dispose.
    /// </summary>
    private sealed class EnvironmentVariableScope : IDisposable
    {
        private readonly string _variableName;
        private readonly string? _previousValue;

        public EnvironmentVariableScope(string variableName, string? value)
        {
            _variableName = variableName;
            _previousValue = Environment.GetEnvironmentVariable(variableName);
            Environment.SetEnvironmentVariable(variableName, value);
        }

        public void Dispose()
        {
            Environment.SetEnvironmentVariable(_variableName, _previousValue);
        }
    }
}
|
||||
@@ -0,0 +1,30 @@
|
||||
using StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Tests.Node;
|
||||
|
||||
public sealed class NodeEntrypointTests
{
    /// <summary>
    /// Conditions are deduplicated and sorted into a canonical comma-joined
    /// set; the remaining fields pass through unchanged.
    /// </summary>
    [Fact]
    public void Create_NormalizesConditions_DedupesAndSorts()
    {
        var rawConditions = new[] { "node", "browser", "node" };

        var entrypoint = NodeEntrypoint.Create(
            path: "src/index.js",
            binName: "cli",
            mainField: "index.js",
            moduleField: null,
            conditions: rawConditions);

        Assert.Equal("browser,node", entrypoint.ConditionSet);
        Assert.Equal("src/index.js", entrypoint.Path);
        Assert.Equal("cli", entrypoint.BinName);
        Assert.Equal("index.js", entrypoint.MainField);
        Assert.Null(entrypoint.ModuleField);
    }

    /// <summary>
    /// An empty condition list normalizes to the empty string.
    /// </summary>
    [Fact]
    public void Create_AllowsEmptyConditions()
    {
        var entrypoint = NodeEntrypoint.Create("src/app.js", null, null, null, Array.Empty<string>());

        Assert.Equal(string.Empty, entrypoint.ConditionSet);
    }
}
|
||||
@@ -0,0 +1,88 @@
|
||||
using FluentAssertions;
|
||||
using StellaOps.Scanner.Emit.Reachability;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Tests.Reachability;
|
||||
|
||||
public class ReachabilityLatticeTests
{
    /// <summary>Forwards a params list of evidence straight to the lattice.</summary>
    private static ReachabilityLatticeResult Evaluate(params ReachabilityEvidence[] evidence)
        => ReachabilityLattice.Evaluate(evidence);

    [Fact]
    public void StaticPath_YieldsConditional()
    {
        // A lone static path contributes 0.5 and a Conditional verdict.
        var result = Evaluate(new ReachabilityEvidence(ReachabilityEvidenceKind.StaticPath, "path1"));

        result.State.Should().Be(ReachabilityState.Conditional);
        result.Score.Should().Be(0.5);
    }

    [Fact]
    public void RuntimeHit_PromotesReachableAndAddsBonus()
    {
        // 0.5 (static) + 0.3 (runtime) = 0.8, promoted to Reachable.
        var result = Evaluate(
            new ReachabilityEvidence(ReachabilityEvidenceKind.StaticPath),
            new ReachabilityEvidence(ReachabilityEvidenceKind.RuntimeHit));

        result.State.Should().Be(ReachabilityState.Reachable);
        result.Score.Should().Be(0.8);
    }

    [Fact]
    public void RuntimeSinkHit_AddsAdditionalBonus()
    {
        // A sink hit counts as a runtime hit too: 0.3 + 0.3 + 0.3 + 0.1 = 1.0.
        var result = Evaluate(
            new ReachabilityEvidence(ReachabilityEvidenceKind.RuntimeHit),
            new ReachabilityEvidence(ReachabilityEvidenceKind.RuntimeSinkHit));

        result.State.Should().Be(ReachabilityState.Reachable);
        result.Score.Should().Be(1.0);
    }

    [Fact]
    public void Guard_DemotesWhenNoRuntimeEvidence()
    {
        // Guard demotes Conditional to Unknown and deducts 0.2: 0.5 - 0.2 = 0.3.
        var result = Evaluate(
            new ReachabilityEvidence(ReachabilityEvidenceKind.StaticPath),
            new ReachabilityEvidence(ReachabilityEvidenceKind.Guard));

        result.State.Should().Be(ReachabilityState.Unknown);
        result.Score.Should().Be(0.3);
    }

    [Fact]
    public void Mitigation_SetsUnreachableWhenNoRuntime()
    {
        // Mitigation forces Unreachable and deducts 0.3: 0.5 - 0.3 = 0.2.
        var result = Evaluate(
            new ReachabilityEvidence(ReachabilityEvidenceKind.StaticPath),
            new ReachabilityEvidence(ReachabilityEvidenceKind.Mitigation));

        result.State.Should().Be(ReachabilityState.Unreachable);
        result.Score.Should().Be(0.2);
    }

    [Fact]
    public void OrderIndependentAndRounded()
    {
        // Evidence order must not matter; guards are ignored once runtime evidence exists.
        var result = Evaluate(
            new ReachabilityEvidence(ReachabilityEvidenceKind.Guard),
            new ReachabilityEvidence(ReachabilityEvidenceKind.StaticPath),
            new ReachabilityEvidence(ReachabilityEvidenceKind.RuntimeHit));

        result.State.Should().Be(ReachabilityState.Reachable);
        result.Score.Should().Be(0.8);
    }
}
|
||||
Reference in New Issue
Block a user