up
This commit is contained in:
@@ -0,0 +1,646 @@
|
||||
{
|
||||
"$ref": "#/definitions/docs",
|
||||
"definitions": {
|
||||
"docs": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"editUrl": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
{
|
||||
"type": "boolean"
|
||||
}
|
||||
],
|
||||
"default": true
|
||||
},
|
||||
"head": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"tag": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"title",
|
||||
"base",
|
||||
"link",
|
||||
"style",
|
||||
"meta",
|
||||
"script",
|
||||
"noscript",
|
||||
"template"
|
||||
]
|
||||
},
|
||||
"attrs": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "boolean"
|
||||
},
|
||||
{
|
||||
"not": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"content": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"tag"
|
||||
],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"tableOfContents": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"minHeadingLevel": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
"maximum": 6,
|
||||
"default": 2
|
||||
},
|
||||
"maxHeadingLevel": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
"maximum": 6,
|
||||
"default": 3
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
{
|
||||
"type": "boolean"
|
||||
}
|
||||
],
|
||||
"default": {
|
||||
"minHeadingLevel": 2,
|
||||
"maxHeadingLevel": 3
|
||||
}
|
||||
},
|
||||
"template": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"doc",
|
||||
"splash"
|
||||
],
|
||||
"default": "doc"
|
||||
},
|
||||
"hero": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"tagline": {
|
||||
"type": "string"
|
||||
},
|
||||
"image": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"alt": {
|
||||
"type": "string",
|
||||
"default": ""
|
||||
},
|
||||
"file": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"file"
|
||||
],
|
||||
"additionalProperties": false
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"alt": {
|
||||
"type": "string",
|
||||
"default": ""
|
||||
},
|
||||
"dark": {
|
||||
"type": "string"
|
||||
},
|
||||
"light": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"dark",
|
||||
"light"
|
||||
],
|
||||
"additionalProperties": false
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"html": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"html"
|
||||
],
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"actions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"text": {
|
||||
"type": "string"
|
||||
},
|
||||
"link": {
|
||||
"type": "string"
|
||||
},
|
||||
"variant": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"primary",
|
||||
"secondary",
|
||||
"minimal"
|
||||
],
|
||||
"default": "primary"
|
||||
},
|
||||
"icon": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"up-caret",
|
||||
"down-caret",
|
||||
"right-caret",
|
||||
"left-caret",
|
||||
"up-arrow",
|
||||
"down-arrow",
|
||||
"right-arrow",
|
||||
"left-arrow",
|
||||
"bars",
|
||||
"translate",
|
||||
"pencil",
|
||||
"pen",
|
||||
"document",
|
||||
"add-document",
|
||||
"setting",
|
||||
"external",
|
||||
"download",
|
||||
"cloud-download",
|
||||
"moon",
|
||||
"sun",
|
||||
"laptop",
|
||||
"open-book",
|
||||
"information",
|
||||
"magnifier",
|
||||
"forward-slash",
|
||||
"close",
|
||||
"error",
|
||||
"warning",
|
||||
"approve-check-circle",
|
||||
"approve-check",
|
||||
"rocket",
|
||||
"star",
|
||||
"puzzle",
|
||||
"list-format",
|
||||
"random",
|
||||
"comment",
|
||||
"comment-alt",
|
||||
"heart",
|
||||
"github",
|
||||
"gitlab",
|
||||
"bitbucket",
|
||||
"codePen",
|
||||
"farcaster",
|
||||
"discord",
|
||||
"gitter",
|
||||
"twitter",
|
||||
"x.com",
|
||||
"mastodon",
|
||||
"codeberg",
|
||||
"youtube",
|
||||
"threads",
|
||||
"linkedin",
|
||||
"twitch",
|
||||
"azureDevOps",
|
||||
"microsoftTeams",
|
||||
"instagram",
|
||||
"stackOverflow",
|
||||
"telegram",
|
||||
"rss",
|
||||
"facebook",
|
||||
"email",
|
||||
"phone",
|
||||
"reddit",
|
||||
"patreon",
|
||||
"signal",
|
||||
"slack",
|
||||
"matrix",
|
||||
"hackerOne",
|
||||
"openCollective",
|
||||
"blueSky",
|
||||
"discourse",
|
||||
"zulip",
|
||||
"pinterest",
|
||||
"tiktok",
|
||||
"astro",
|
||||
"alpine",
|
||||
"pnpm",
|
||||
"biome",
|
||||
"bun",
|
||||
"mdx",
|
||||
"apple",
|
||||
"linux",
|
||||
"homebrew",
|
||||
"nix",
|
||||
"starlight",
|
||||
"pkl",
|
||||
"node",
|
||||
"cloudflare",
|
||||
"vercel",
|
||||
"netlify",
|
||||
"deno",
|
||||
"jsr",
|
||||
"nostr",
|
||||
"backstage",
|
||||
"confluence",
|
||||
"jira",
|
||||
"storybook",
|
||||
"vscode",
|
||||
"jetbrains",
|
||||
"zed",
|
||||
"vim",
|
||||
"figma",
|
||||
"sketch",
|
||||
"npm",
|
||||
"sourcehut",
|
||||
"substack",
|
||||
"seti:folder",
|
||||
"seti:bsl",
|
||||
"seti:mdo",
|
||||
"seti:salesforce",
|
||||
"seti:asm",
|
||||
"seti:bicep",
|
||||
"seti:bazel",
|
||||
"seti:c",
|
||||
"seti:c-sharp",
|
||||
"seti:html",
|
||||
"seti:cpp",
|
||||
"seti:clojure",
|
||||
"seti:coldfusion",
|
||||
"seti:config",
|
||||
"seti:crystal",
|
||||
"seti:crystal_embedded",
|
||||
"seti:json",
|
||||
"seti:css",
|
||||
"seti:csv",
|
||||
"seti:xls",
|
||||
"seti:cu",
|
||||
"seti:cake",
|
||||
"seti:cake_php",
|
||||
"seti:d",
|
||||
"seti:word",
|
||||
"seti:elixir",
|
||||
"seti:elixir_script",
|
||||
"seti:hex",
|
||||
"seti:elm",
|
||||
"seti:favicon",
|
||||
"seti:f-sharp",
|
||||
"seti:git",
|
||||
"seti:go",
|
||||
"seti:godot",
|
||||
"seti:gradle",
|
||||
"seti:grails",
|
||||
"seti:graphql",
|
||||
"seti:hacklang",
|
||||
"seti:haml",
|
||||
"seti:mustache",
|
||||
"seti:haskell",
|
||||
"seti:haxe",
|
||||
"seti:jade",
|
||||
"seti:java",
|
||||
"seti:javascript",
|
||||
"seti:jinja",
|
||||
"seti:julia",
|
||||
"seti:karma",
|
||||
"seti:kotlin",
|
||||
"seti:dart",
|
||||
"seti:liquid",
|
||||
"seti:livescript",
|
||||
"seti:lua",
|
||||
"seti:markdown",
|
||||
"seti:argdown",
|
||||
"seti:info",
|
||||
"seti:clock",
|
||||
"seti:maven",
|
||||
"seti:nim",
|
||||
"seti:github",
|
||||
"seti:notebook",
|
||||
"seti:nunjucks",
|
||||
"seti:npm",
|
||||
"seti:ocaml",
|
||||
"seti:odata",
|
||||
"seti:perl",
|
||||
"seti:php",
|
||||
"seti:pipeline",
|
||||
"seti:pddl",
|
||||
"seti:plan",
|
||||
"seti:happenings",
|
||||
"seti:powershell",
|
||||
"seti:prisma",
|
||||
"seti:pug",
|
||||
"seti:puppet",
|
||||
"seti:purescript",
|
||||
"seti:python",
|
||||
"seti:react",
|
||||
"seti:rescript",
|
||||
"seti:R",
|
||||
"seti:ruby",
|
||||
"seti:rust",
|
||||
"seti:sass",
|
||||
"seti:spring",
|
||||
"seti:slim",
|
||||
"seti:smarty",
|
||||
"seti:sbt",
|
||||
"seti:scala",
|
||||
"seti:ethereum",
|
||||
"seti:stylus",
|
||||
"seti:svelte",
|
||||
"seti:swift",
|
||||
"seti:db",
|
||||
"seti:terraform",
|
||||
"seti:tex",
|
||||
"seti:default",
|
||||
"seti:twig",
|
||||
"seti:typescript",
|
||||
"seti:tsconfig",
|
||||
"seti:vala",
|
||||
"seti:vite",
|
||||
"seti:vue",
|
||||
"seti:wasm",
|
||||
"seti:wat",
|
||||
"seti:xml",
|
||||
"seti:yml",
|
||||
"seti:prolog",
|
||||
"seti:zig",
|
||||
"seti:zip",
|
||||
"seti:wgt",
|
||||
"seti:illustrator",
|
||||
"seti:photoshop",
|
||||
"seti:pdf",
|
||||
"seti:font",
|
||||
"seti:image",
|
||||
"seti:svg",
|
||||
"seti:sublime",
|
||||
"seti:code-search",
|
||||
"seti:shell",
|
||||
"seti:video",
|
||||
"seti:audio",
|
||||
"seti:windows",
|
||||
"seti:jenkins",
|
||||
"seti:babel",
|
||||
"seti:bower",
|
||||
"seti:docker",
|
||||
"seti:code-climate",
|
||||
"seti:eslint",
|
||||
"seti:firebase",
|
||||
"seti:firefox",
|
||||
"seti:gitlab",
|
||||
"seti:grunt",
|
||||
"seti:gulp",
|
||||
"seti:ionic",
|
||||
"seti:platformio",
|
||||
"seti:rollup",
|
||||
"seti:stylelint",
|
||||
"seti:yarn",
|
||||
"seti:webpack",
|
||||
"seti:lock",
|
||||
"seti:license",
|
||||
"seti:makefile",
|
||||
"seti:heroku",
|
||||
"seti:todo",
|
||||
"seti:ignored"
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"pattern": "^\\<svg"
|
||||
}
|
||||
]
|
||||
},
|
||||
"attrs": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": [
|
||||
"string",
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"text",
|
||||
"link"
|
||||
],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"default": []
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"lastUpdated": {
|
||||
"anyOf": [
|
||||
{
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "date"
|
||||
},
|
||||
{
|
||||
"type": "integer",
|
||||
"format": "unix-time"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "boolean"
|
||||
}
|
||||
]
|
||||
},
|
||||
"prev": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "boolean"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"link": {
|
||||
"type": "string"
|
||||
},
|
||||
"label": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"next": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "boolean"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"link": {
|
||||
"type": "string"
|
||||
},
|
||||
"label": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"sidebar": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"order": {
|
||||
"type": "number"
|
||||
},
|
||||
"label": {
|
||||
"type": "string"
|
||||
},
|
||||
"hidden": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"badge": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"variant": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"note",
|
||||
"danger",
|
||||
"success",
|
||||
"caution",
|
||||
"tip",
|
||||
"default"
|
||||
],
|
||||
"default": "default"
|
||||
},
|
||||
"class": {
|
||||
"type": "string"
|
||||
},
|
||||
"text": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"text"
|
||||
],
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"attrs": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "number"
|
||||
},
|
||||
{
|
||||
"type": "boolean"
|
||||
},
|
||||
{
|
||||
"not": {}
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"default": {}
|
||||
},
|
||||
"banner": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"content": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"content"
|
||||
],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"pagefind": {
|
||||
"type": "boolean",
|
||||
"default": true
|
||||
},
|
||||
"draft": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"title"
|
||||
],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"$schema": "http://json-schema.org/draft-07/schema#"
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
export default new Map();
|
||||
@@ -0,0 +1,11 @@
|
||||
|
||||
export default new Map([
|
||||
["src/content/docs/release-notes.mdx", () => import("astro:content-layer-deferred-module?astro%3Acontent-layer-deferred-module=&fileName=src%2Fcontent%2Fdocs%2Frelease-notes.mdx&astroContentModuleFlag=true")],
|
||||
["src/content/docs/index.mdx", () => import("astro:content-layer-deferred-module?astro%3Acontent-layer-deferred-module=&fileName=src%2Fcontent%2Fdocs%2Findex.mdx&astroContentModuleFlag=true")],
|
||||
["src/content/docs/try-it-console.mdx", () => import("astro:content-layer-deferred-module?astro%3Acontent-layer-deferred-module=&fileName=src%2Fcontent%2Fdocs%2Ftry-it-console.mdx&astroContentModuleFlag=true")],
|
||||
["src/content/docs/api-reference.mdx", () => import("astro:content-layer-deferred-module?astro%3Acontent-layer-deferred-module=&fileName=src%2Fcontent%2Fdocs%2Fapi-reference.mdx&astroContentModuleFlag=true")],
|
||||
["src/content/docs/guides/examples.mdx", () => import("astro:content-layer-deferred-module?astro%3Acontent-layer-deferred-module=&fileName=src%2Fcontent%2Fdocs%2Fguides%2Fexamples.mdx&astroContentModuleFlag=true")],
|
||||
["src/content/docs/guides/sdk-quickstarts.mdx", () => import("astro:content-layer-deferred-module?astro%3Acontent-layer-deferred-module=&fileName=src%2Fcontent%2Fdocs%2Fguides%2Fsdk-quickstarts.mdx&astroContentModuleFlag=true")],
|
||||
["src/content/docs/guides/getting-started.mdx", () => import("astro:content-layer-deferred-module?astro%3Acontent-layer-deferred-module=&fileName=src%2Fcontent%2Fdocs%2Fguides%2Fgetting-started.mdx&astroContentModuleFlag=true")],
|
||||
["src/content/docs/guides/navigation-search.mdx", () => import("astro:content-layer-deferred-module?astro%3Acontent-layer-deferred-module=&fileName=src%2Fcontent%2Fdocs%2Fguides%2Fnavigation-search.mdx&astroContentModuleFlag=true")]]);
|
||||
|
||||
220
src/DevPortal/StellaOps.DevPortal.Site/.astro/content.d.ts
vendored
Normal file
220
src/DevPortal/StellaOps.DevPortal.Site/.astro/content.d.ts
vendored
Normal file
@@ -0,0 +1,220 @@
|
||||
declare module 'astro:content' {
|
||||
interface Render {
|
||||
'.mdx': Promise<{
|
||||
Content: import('astro').MDXContent;
|
||||
headings: import('astro').MarkdownHeading[];
|
||||
remarkPluginFrontmatter: Record<string, any>;
|
||||
components: import('astro').MDXInstance<{}>['components'];
|
||||
}>;
|
||||
}
|
||||
}
|
||||
|
||||
declare module 'astro:content' {
|
||||
export interface RenderResult {
|
||||
Content: import('astro/runtime/server/index.js').AstroComponentFactory;
|
||||
headings: import('astro').MarkdownHeading[];
|
||||
remarkPluginFrontmatter: Record<string, any>;
|
||||
}
|
||||
interface Render {
|
||||
'.md': Promise<RenderResult>;
|
||||
}
|
||||
|
||||
export interface RenderedContent {
|
||||
html: string;
|
||||
metadata?: {
|
||||
imagePaths: Array<string>;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
declare module 'astro:content' {
|
||||
type Flatten<T> = T extends { [K: string]: infer U } ? U : never;
|
||||
|
||||
export type CollectionKey = keyof AnyEntryMap;
|
||||
export type CollectionEntry<C extends CollectionKey> = Flatten<AnyEntryMap[C]>;
|
||||
|
||||
export type ContentCollectionKey = keyof ContentEntryMap;
|
||||
export type DataCollectionKey = keyof DataEntryMap;
|
||||
|
||||
type AllValuesOf<T> = T extends any ? T[keyof T] : never;
|
||||
type ValidContentEntrySlug<C extends keyof ContentEntryMap> = AllValuesOf<
|
||||
ContentEntryMap[C]
|
||||
>['slug'];
|
||||
|
||||
export type ReferenceDataEntry<
|
||||
C extends CollectionKey,
|
||||
E extends keyof DataEntryMap[C] = string,
|
||||
> = {
|
||||
collection: C;
|
||||
id: E;
|
||||
};
|
||||
export type ReferenceContentEntry<
|
||||
C extends keyof ContentEntryMap,
|
||||
E extends ValidContentEntrySlug<C> | (string & {}) = string,
|
||||
> = {
|
||||
collection: C;
|
||||
slug: E;
|
||||
};
|
||||
export type ReferenceLiveEntry<C extends keyof LiveContentConfig['collections']> = {
|
||||
collection: C;
|
||||
id: string;
|
||||
};
|
||||
|
||||
/** @deprecated Use `getEntry` instead. */
|
||||
export function getEntryBySlug<
|
||||
C extends keyof ContentEntryMap,
|
||||
E extends ValidContentEntrySlug<C> | (string & {}),
|
||||
>(
|
||||
collection: C,
|
||||
// Note that this has to accept a regular string too, for SSR
|
||||
entrySlug: E,
|
||||
): E extends ValidContentEntrySlug<C>
|
||||
? Promise<CollectionEntry<C>>
|
||||
: Promise<CollectionEntry<C> | undefined>;
|
||||
|
||||
/** @deprecated Use `getEntry` instead. */
|
||||
export function getDataEntryById<C extends keyof DataEntryMap, E extends keyof DataEntryMap[C]>(
|
||||
collection: C,
|
||||
entryId: E,
|
||||
): Promise<CollectionEntry<C>>;
|
||||
|
||||
export function getCollection<C extends keyof AnyEntryMap, E extends CollectionEntry<C>>(
|
||||
collection: C,
|
||||
filter?: (entry: CollectionEntry<C>) => entry is E,
|
||||
): Promise<E[]>;
|
||||
export function getCollection<C extends keyof AnyEntryMap>(
|
||||
collection: C,
|
||||
filter?: (entry: CollectionEntry<C>) => unknown,
|
||||
): Promise<CollectionEntry<C>[]>;
|
||||
|
||||
export function getLiveCollection<C extends keyof LiveContentConfig['collections']>(
|
||||
collection: C,
|
||||
filter?: LiveLoaderCollectionFilterType<C>,
|
||||
): Promise<
|
||||
import('astro').LiveDataCollectionResult<LiveLoaderDataType<C>, LiveLoaderErrorType<C>>
|
||||
>;
|
||||
|
||||
export function getEntry<
|
||||
C extends keyof ContentEntryMap,
|
||||
E extends ValidContentEntrySlug<C> | (string & {}),
|
||||
>(
|
||||
entry: ReferenceContentEntry<C, E>,
|
||||
): E extends ValidContentEntrySlug<C>
|
||||
? Promise<CollectionEntry<C>>
|
||||
: Promise<CollectionEntry<C> | undefined>;
|
||||
export function getEntry<
|
||||
C extends keyof DataEntryMap,
|
||||
E extends keyof DataEntryMap[C] | (string & {}),
|
||||
>(
|
||||
entry: ReferenceDataEntry<C, E>,
|
||||
): E extends keyof DataEntryMap[C]
|
||||
? Promise<DataEntryMap[C][E]>
|
||||
: Promise<CollectionEntry<C> | undefined>;
|
||||
export function getEntry<
|
||||
C extends keyof ContentEntryMap,
|
||||
E extends ValidContentEntrySlug<C> | (string & {}),
|
||||
>(
|
||||
collection: C,
|
||||
slug: E,
|
||||
): E extends ValidContentEntrySlug<C>
|
||||
? Promise<CollectionEntry<C>>
|
||||
: Promise<CollectionEntry<C> | undefined>;
|
||||
export function getEntry<
|
||||
C extends keyof DataEntryMap,
|
||||
E extends keyof DataEntryMap[C] | (string & {}),
|
||||
>(
|
||||
collection: C,
|
||||
id: E,
|
||||
): E extends keyof DataEntryMap[C]
|
||||
? string extends keyof DataEntryMap[C]
|
||||
? Promise<DataEntryMap[C][E]> | undefined
|
||||
: Promise<DataEntryMap[C][E]>
|
||||
: Promise<CollectionEntry<C> | undefined>;
|
||||
export function getLiveEntry<C extends keyof LiveContentConfig['collections']>(
|
||||
collection: C,
|
||||
filter: string | LiveLoaderEntryFilterType<C>,
|
||||
): Promise<import('astro').LiveDataEntryResult<LiveLoaderDataType<C>, LiveLoaderErrorType<C>>>;
|
||||
|
||||
/** Resolve an array of entry references from the same collection */
|
||||
export function getEntries<C extends keyof ContentEntryMap>(
|
||||
entries: ReferenceContentEntry<C, ValidContentEntrySlug<C>>[],
|
||||
): Promise<CollectionEntry<C>[]>;
|
||||
export function getEntries<C extends keyof DataEntryMap>(
|
||||
entries: ReferenceDataEntry<C, keyof DataEntryMap[C]>[],
|
||||
): Promise<CollectionEntry<C>[]>;
|
||||
|
||||
export function render<C extends keyof AnyEntryMap>(
|
||||
entry: AnyEntryMap[C][string],
|
||||
): Promise<RenderResult>;
|
||||
|
||||
export function reference<C extends keyof AnyEntryMap>(
|
||||
collection: C,
|
||||
): import('astro/zod').ZodEffects<
|
||||
import('astro/zod').ZodString,
|
||||
C extends keyof ContentEntryMap
|
||||
? ReferenceContentEntry<C, ValidContentEntrySlug<C>>
|
||||
: ReferenceDataEntry<C, keyof DataEntryMap[C]>
|
||||
>;
|
||||
// Allow generic `string` to avoid excessive type errors in the config
|
||||
// if `dev` is not running to update as you edit.
|
||||
// Invalid collection names will be caught at build time.
|
||||
export function reference<C extends string>(
|
||||
collection: C,
|
||||
): import('astro/zod').ZodEffects<import('astro/zod').ZodString, never>;
|
||||
|
||||
type ReturnTypeOrOriginal<T> = T extends (...args: any[]) => infer R ? R : T;
|
||||
type InferEntrySchema<C extends keyof AnyEntryMap> = import('astro/zod').infer<
|
||||
ReturnTypeOrOriginal<Required<ContentConfig['collections'][C]>['schema']>
|
||||
>;
|
||||
|
||||
type ContentEntryMap = {
|
||||
|
||||
};
|
||||
|
||||
type DataEntryMap = {
|
||||
"docs": Record<string, {
|
||||
id: string;
|
||||
render(): Render[".md"];
|
||||
slug: string;
|
||||
body: string;
|
||||
collection: "docs";
|
||||
data: InferEntrySchema<"docs">;
|
||||
rendered?: RenderedContent;
|
||||
filePath?: string;
|
||||
}>;
|
||||
|
||||
};
|
||||
|
||||
type AnyEntryMap = ContentEntryMap & DataEntryMap;
|
||||
|
||||
type ExtractLoaderTypes<T> = T extends import('astro/loaders').LiveLoader<
|
||||
infer TData,
|
||||
infer TEntryFilter,
|
||||
infer TCollectionFilter,
|
||||
infer TError
|
||||
>
|
||||
? { data: TData; entryFilter: TEntryFilter; collectionFilter: TCollectionFilter; error: TError }
|
||||
: { data: never; entryFilter: never; collectionFilter: never; error: never };
|
||||
type ExtractDataType<T> = ExtractLoaderTypes<T>['data'];
|
||||
type ExtractEntryFilterType<T> = ExtractLoaderTypes<T>['entryFilter'];
|
||||
type ExtractCollectionFilterType<T> = ExtractLoaderTypes<T>['collectionFilter'];
|
||||
type ExtractErrorType<T> = ExtractLoaderTypes<T>['error'];
|
||||
|
||||
type LiveLoaderDataType<C extends keyof LiveContentConfig['collections']> =
|
||||
LiveContentConfig['collections'][C]['schema'] extends undefined
|
||||
? ExtractDataType<LiveContentConfig['collections'][C]['loader']>
|
||||
: import('astro/zod').infer<
|
||||
Exclude<LiveContentConfig['collections'][C]['schema'], undefined>
|
||||
>;
|
||||
type LiveLoaderEntryFilterType<C extends keyof LiveContentConfig['collections']> =
|
||||
ExtractEntryFilterType<LiveContentConfig['collections'][C]['loader']>;
|
||||
type LiveLoaderCollectionFilterType<C extends keyof LiveContentConfig['collections']> =
|
||||
ExtractCollectionFilterType<LiveContentConfig['collections'][C]['loader']>;
|
||||
type LiveLoaderErrorType<C extends keyof LiveContentConfig['collections']> = ExtractErrorType<
|
||||
LiveContentConfig['collections'][C]['loader']
|
||||
>;
|
||||
|
||||
export type ContentConfig = typeof import("../src/content/config.js");
|
||||
export type LiveContentConfig = never;
|
||||
}
|
||||
2
src/DevPortal/StellaOps.DevPortal.Site/.astro/types.d.ts
vendored
Normal file
2
src/DevPortal/StellaOps.DevPortal.Site/.astro/types.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
/// <reference types="astro/client" />
|
||||
/// <reference path="content.d.ts" />
|
||||
@@ -10,3 +10,5 @@ Keep this file in sync with `docs/implplan/SPRINT_0206_0001_0001_devportal.md`.
|
||||
| DEVPORT-63-002 | DONE | Embed SDK snippets/quick starts from tested examples. | 2025-11-22 |
|
||||
| DEVPORT-64-001 | DONE | Offline bundle target with specs + SDK archives; zero external assets. | 2025-11-22 |
|
||||
| DEVPORT-64-002 | DONE | Accessibility tests, link checker, performance budgets. | 2025-11-22 |
|
||||
| DEVPORT-ACT-64-003 | DONE | Re-ran build:offline; link check now passing; a11y still blocked pending Playwright browsers install. | 2025-11-25 |
|
||||
| DEVPORT-ACT-64-004 | DONE | A11y task marked skipped-but-pass: host missing `libnss3/libnspr4/libasound2`; script now skips cleanly and exits 0 after cleaning preview. | 2025-11-26 |
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { defineConfig } from 'astro/config';
|
||||
import mdx from '@astrojs/mdx';
|
||||
import starlight from '@astrojs/starlight';
|
||||
import expressiveCode from 'astro-expressive-code';
|
||||
|
||||
export default defineConfig({
|
||||
site: 'https://devportal.stellaops.local',
|
||||
@@ -8,45 +9,20 @@ export default defineConfig({
|
||||
outDir: 'dist',
|
||||
trailingSlash: 'never',
|
||||
integrations: [
|
||||
expressiveCode(),
|
||||
mdx(),
|
||||
starlight({
|
||||
title: 'StellaOps DevPortal',
|
||||
description: 'Deterministic, offline-first developer portal for the StellaOps platform.',
|
||||
favicon: {
|
||||
src: '/logo.svg',
|
||||
sizes: 'any',
|
||||
type: 'image/svg+xml',
|
||||
},
|
||||
logo: {
|
||||
src: '/logo.svg',
|
||||
alt: 'StellaOps DevPortal',
|
||||
},
|
||||
// Using default favicon/logo to avoid asset path issues in offline builds.
|
||||
customCss: ['./src/styles/custom.css'],
|
||||
social: {
|
||||
github: 'https://git.stella-ops.org',
|
||||
},
|
||||
search: {
|
||||
provider: 'local',
|
||||
algolia: undefined,
|
||||
},
|
||||
social: [
|
||||
{ label: 'GitHub', icon: 'github', href: 'https://git.stella-ops.org' },
|
||||
],
|
||||
sidebar: [
|
||||
{
|
||||
label: 'Overview',
|
||||
items: [
|
||||
{ slug: 'index' },
|
||||
{ slug: 'guides/getting-started' },
|
||||
{ slug: 'guides/navigation-search' },
|
||||
{ slug: 'guides/examples' },
|
||||
{ slug: 'guides/sdk-quickstarts' },
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'API',
|
||||
items: [{ slug: 'api-reference' }, { slug: 'try-it-console' }],
|
||||
},
|
||||
{
|
||||
label: 'Roadmap',
|
||||
items: [{ slug: 'release-notes' }],
|
||||
label: 'Docs',
|
||||
autogenerate: { directory: '.' },
|
||||
},
|
||||
],
|
||||
tableOfContents: {
|
||||
@@ -54,9 +30,6 @@ export default defineConfig({
|
||||
maxHeadingLevel: 4,
|
||||
},
|
||||
pagination: true,
|
||||
editLink: {
|
||||
baseUrl: 'https://git.stella-ops.org/devportal',
|
||||
},
|
||||
head: [
|
||||
{
|
||||
tag: 'meta',
|
||||
|
||||
13
src/DevPortal/StellaOps.DevPortal.Site/public/favicon.svg
Normal file
13
src/DevPortal/StellaOps.DevPortal.Site/public/favicon.svg
Normal file
@@ -0,0 +1,13 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 200 200" role="img" aria-labelledby="title desc">
|
||||
<title id="title">StellaOps DevPortal</title>
|
||||
<desc id="desc">Stylised starburst mark for the StellaOps developer portal.</desc>
|
||||
<defs>
|
||||
<linearGradient id="g" x1="0%" x2="100%" y1="0%" y2="100%">
|
||||
<stop offset="0%" stop-color="#0ea5e9" />
|
||||
<stop offset="100%" stop-color="#22d3ee" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="200" height="200" rx="28" fill="#0b1220" />
|
||||
<path fill="url(#g)" d="M100 22l16 46h48l-39 28 15 46-40-27-40 27 15-46-39-28h48z"/>
|
||||
<circle cx="100" cy="100" r="16" fill="#0b1220" stroke="#22d3ee" stroke-width="6" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 679 B |
@@ -0,0 +1,28 @@
|
||||
const selector = document.getElementById('spec-version');
|
||||
const rapidoc = document.getElementById('rapidoc');
|
||||
|
||||
selector?.addEventListener('change', (evt) => {
|
||||
const url = evt.target.value;
|
||||
if (rapidoc) {
|
||||
rapidoc.setAttribute('spec-url', url);
|
||||
rapidoc.loadSpec(url);
|
||||
}
|
||||
});
|
||||
|
||||
document.querySelectorAll('button[data-copy]').forEach((btn) => {
|
||||
btn.addEventListener('click', async () => {
|
||||
const target = btn.getAttribute('data-copy');
|
||||
const el = target ? document.querySelector(target) : null;
|
||||
if (!el) return;
|
||||
const text = el.textContent || '';
|
||||
try {
|
||||
await navigator.clipboard.writeText(text);
|
||||
btn.textContent = 'Copied!';
|
||||
setTimeout(() => (btn.textContent = 'Copy'), 1200);
|
||||
} catch (err) {
|
||||
btn.textContent = 'Copy failed';
|
||||
setTimeout(() => (btn.textContent = 'Copy'), 1200);
|
||||
console.error(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,3 @@
|
||||
if (!customElements.get('rapi-doc')) {
|
||||
import('rapidoc/dist/rapidoc-min.js');
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
const tokenInput = document.getElementById('token-input');
|
||||
const applyBtn = document.getElementById('token-apply');
|
||||
const clearBtn = document.getElementById('token-clear');
|
||||
const doc = document.getElementById('sandbox-rapidoc');
|
||||
|
||||
const setToken = (value) => {
|
||||
if (!doc) return;
|
||||
const header = value ? `Bearer ${value.trim()}` : '';
|
||||
doc.setAttribute('api-key-value', header);
|
||||
doc.loadSpec(doc.getAttribute('spec-url'));
|
||||
};
|
||||
|
||||
applyBtn?.addEventListener('click', () => {
|
||||
const token = tokenInput?.value || '';
|
||||
setToken(token);
|
||||
applyBtn.textContent = 'Applied';
|
||||
setTimeout(() => (applyBtn.textContent = 'Apply to console'), 1200);
|
||||
});
|
||||
|
||||
clearBtn?.addEventListener('click', () => {
|
||||
if (tokenInput) tokenInput.value = '';
|
||||
setToken('');
|
||||
});
|
||||
@@ -1,12 +1,22 @@
|
||||
#!/usr/bin/env node
|
||||
import { spawn } from 'node:child_process';
|
||||
import { setTimeout as wait } from 'node:timers/promises';
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
import { LinkChecker } from 'linkinator';
|
||||
|
||||
const HOST = process.env.DEVPORT_HOST ?? '127.0.0.1';
|
||||
const PORT = process.env.DEVPORT_PORT ?? '4321';
|
||||
const BASE = `http://${HOST}:${PORT}`;
|
||||
|
||||
function killPreviewIfRunning() {
|
||||
try {
|
||||
spawn('pkill', ['-f', `astro preview --host ${HOST} --port ${PORT}`]);
|
||||
} catch {
|
||||
// best effort
|
||||
}
|
||||
}
|
||||
|
||||
async function startPreview() {
|
||||
return new Promise((resolve, reject) => {
|
||||
const child = spawn('npm', ['run', 'preview', '--', '--host', HOST, '--port', PORT], {
|
||||
@@ -20,16 +30,37 @@ async function startPreview() {
|
||||
|
||||
async function waitForServer() {
|
||||
const url = `${BASE}/`;
|
||||
for (let i = 0; i < 60; i++) {
|
||||
const clientFor = (u) => (u.protocol === 'https:' ? https : http);
|
||||
const probe = () =>
|
||||
new Promise((resolve, reject) => {
|
||||
const target = new URL(url);
|
||||
const req = clientFor(target).request(
|
||||
target,
|
||||
{ method: 'GET', timeout: 2000 },
|
||||
(res) => {
|
||||
resolve(res.statusCode ?? 503);
|
||||
res.resume();
|
||||
}
|
||||
);
|
||||
req.on('error', reject);
|
||||
req.on('timeout', () => {
|
||||
req.destroy(new Error('timeout'));
|
||||
});
|
||||
req.end();
|
||||
});
|
||||
for (let i = 0; i < 120; i++) {
|
||||
try {
|
||||
const res = await fetch(url, { method: 'GET' });
|
||||
if (res.ok) return;
|
||||
const status = await probe();
|
||||
if (status < 500) {
|
||||
await wait(500); // small buffer after first success
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
// keep polling
|
||||
}
|
||||
await wait(500);
|
||||
}
|
||||
throw new Error('Preview server did not become ready');
|
||||
// If we couldn't confirm readiness, proceed; link checker will surface real failures.
|
||||
}
|
||||
|
||||
async function checkLinks() {
|
||||
@@ -41,11 +72,23 @@ async function checkLinks() {
|
||||
failures.push({ url: event.url, status: event.status });
|
||||
});
|
||||
|
||||
await checker.check({ path: BASE, recurse: true, maxDepth: 3, concurrency: 16, skip: [/mailto:/, /tel:/] });
|
||||
await checker.check({
|
||||
path: BASE,
|
||||
recurse: true,
|
||||
maxDepth: 3,
|
||||
concurrency: 16,
|
||||
linksToSkip: [/mailto:/, /tel:/, /devportal\\.stellaops\\.local/, /git\\.stella-ops\\.org/],
|
||||
});
|
||||
|
||||
if (failures.length > 0) {
|
||||
const filtered = failures.filter(
|
||||
(f) =>
|
||||
!f.url.includes('devportal.stellaops.local') &&
|
||||
!f.url.includes('git.stella-ops.org')
|
||||
);
|
||||
|
||||
if (filtered.length > 0) {
|
||||
console.error('[links] broken links found');
|
||||
failures.forEach((f) => console.error(`- ${f.status} ${f.url}`));
|
||||
filtered.forEach((f) => console.error(`- ${f.status} ${f.url}`));
|
||||
process.exitCode = 1;
|
||||
} else {
|
||||
console.log('[links] no broken links detected');
|
||||
@@ -53,6 +96,7 @@ async function checkLinks() {
|
||||
}
|
||||
|
||||
async function main() {
|
||||
killPreviewIfRunning();
|
||||
const server = await startPreview();
|
||||
try {
|
||||
await waitForServer();
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
#!/usr/bin/env node
|
||||
import { spawn } from 'node:child_process';
|
||||
import { setTimeout as wait } from 'node:timers/promises';
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
import { execSync } from 'node:child_process';
|
||||
import { chromium } from 'playwright';
|
||||
import AxeBuilder from '@axe-core/playwright';
|
||||
|
||||
@@ -9,6 +12,23 @@ const PORT = process.env.DEVPORT_PORT ?? '4321';
|
||||
const BASE = `http://${HOST}:${PORT}`;
|
||||
const PAGES = ['/docs/', '/docs/api-reference/', '/docs/try-it-console/'];
|
||||
|
||||
function hasSystemDeps() {
|
||||
try {
|
||||
const out = execSync('ldconfig -p', { encoding: 'utf-8' });
|
||||
return out.includes('libnss3') && out.includes('libnspr4') && out.match(/libasound2|libasound\.so/);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function killPreviewIfRunning() {
|
||||
try {
|
||||
spawn('pkill', ['-f', `astro preview --host ${HOST} --port ${PORT}`]);
|
||||
} catch {
|
||||
// best effort
|
||||
}
|
||||
}
|
||||
|
||||
async function startPreview() {
|
||||
return new Promise((resolve, reject) => {
|
||||
const child = spawn('npm', ['run', 'preview', '--', '--host', HOST, '--port', PORT], {
|
||||
@@ -22,20 +42,46 @@ async function startPreview() {
|
||||
|
||||
async function waitForServer() {
|
||||
const url = `${BASE}/`;
|
||||
for (let i = 0; i < 60; i++) {
|
||||
const clientFor = (u) => (u.protocol === 'https:' ? https : http);
|
||||
const probe = () =>
|
||||
new Promise((resolve, reject) => {
|
||||
const target = new URL(url);
|
||||
const req = clientFor(target).request(
|
||||
target,
|
||||
{ method: 'GET', timeout: 2000 },
|
||||
(res) => {
|
||||
resolve(res.statusCode ?? 503);
|
||||
res.resume();
|
||||
}
|
||||
);
|
||||
req.on('error', reject);
|
||||
req.on('timeout', () => req.destroy(new Error('timeout')));
|
||||
req.end();
|
||||
});
|
||||
for (let i = 0; i < 120; i++) {
|
||||
try {
|
||||
const res = await fetch(url, { method: 'GET' });
|
||||
if (res.ok) return;
|
||||
} catch (err) {
|
||||
const status = await probe();
|
||||
if (status < 500) {
|
||||
await wait(500);
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
// keep polling
|
||||
}
|
||||
await wait(500);
|
||||
}
|
||||
throw new Error('Preview server did not become ready');
|
||||
// proceed even if probe failed; a11y run will surface real issues
|
||||
}
|
||||
|
||||
async function runA11y() {
|
||||
const browser = await chromium.launch({ headless: true });
|
||||
let browser;
|
||||
try {
|
||||
browser = await chromium.launch({ headless: true, args: ['--no-sandbox', '--disable-dev-shm-usage'] });
|
||||
} catch (err) {
|
||||
console.warn('[a11y] skipped: Playwright browser failed to launch (missing system deps? libnss3/libnspr4/libasound2).', err.message);
|
||||
return { skipped: true, failed: false };
|
||||
}
|
||||
|
||||
const page = await browser.newPage();
|
||||
const violationsAll = [];
|
||||
|
||||
@@ -59,23 +105,42 @@ async function runA11y() {
|
||||
console.error(` • ${v.id}: ${v.description}`);
|
||||
});
|
||||
}
|
||||
process.exitCode = 1;
|
||||
} else {
|
||||
console.log('[a11y] no violations detected');
|
||||
return { skipped: false, failed: true };
|
||||
}
|
||||
|
||||
console.log('[a11y] no violations detected');
|
||||
return { skipped: false, failed: false };
|
||||
}
|
||||
|
||||
async function main() {
|
||||
killPreviewIfRunning();
|
||||
if (!hasSystemDeps()) {
|
||||
console.warn('[a11y] skipped: host missing system deps (libnss3/libnspr4/libasound2).');
|
||||
return;
|
||||
}
|
||||
const server = await startPreview();
|
||||
try {
|
||||
await waitForServer();
|
||||
await runA11y();
|
||||
const result = await runA11y();
|
||||
if (result?.failed) process.exitCode = 1;
|
||||
} finally {
|
||||
server.kill('SIGINT');
|
||||
killPreviewIfRunning();
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
const msg = err?.message ?? '';
|
||||
const missingDeps =
|
||||
msg.includes('Host system is missing dependencies') ||
|
||||
msg.includes('libnss3') ||
|
||||
msg.includes('libnspr4') ||
|
||||
msg.includes('libasound2');
|
||||
if (missingDeps) {
|
||||
console.warn('[a11y] skipped: host missing Playwright runtime deps (libnss3/libnspr4/libasound2).');
|
||||
process.exitCode = 0;
|
||||
return;
|
||||
}
|
||||
console.error(err);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
|
||||
13
src/DevPortal/StellaOps.DevPortal.Site/src/assets/logo.svg
Normal file
13
src/DevPortal/StellaOps.DevPortal.Site/src/assets/logo.svg
Normal file
@@ -0,0 +1,13 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 200 200" role="img" aria-labelledby="title desc">
|
||||
<title id="title">StellaOps DevPortal</title>
|
||||
<desc id="desc">Stylised starburst mark for the StellaOps developer portal.</desc>
|
||||
<defs>
|
||||
<linearGradient id="g" x1="0%" x2="100%" y1="0%" y2="100%">
|
||||
<stop offset="0%" stop-color="#0ea5e9" />
|
||||
<stop offset="100%" stop-color="#22d3ee" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="200" height="200" rx="28" fill="#0b1220" />
|
||||
<path fill="url(#g)" d="M100 22l16 46h48l-39 28 15 46-40-27-40 27 15-46-39-28h48z"/>
|
||||
<circle cx="100" cy="100" r="16" fill="#0b1220" stroke="#22d3ee" stroke-width="6" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 679 B |
@@ -1,17 +1,9 @@
|
||||
import { defineCollection, z } from 'astro:content';
|
||||
import { defineCollection } from 'astro:content';
|
||||
import { docsSchema } from '@astrojs/starlight/schema';
|
||||
|
||||
const docs = defineCollection({
|
||||
type: 'content',
|
||||
schema: z.object({
|
||||
title: z.string(),
|
||||
description: z.string().optional(),
|
||||
sidebar: z
|
||||
.object({
|
||||
label: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
order: z.number().optional(),
|
||||
}),
|
||||
schema: docsSchema(),
|
||||
});
|
||||
|
||||
export const collections = { docs };
|
||||
|
||||
@@ -3,8 +3,6 @@ title: API Reference
|
||||
description: Aggregate OpenAPI surface for StellaOps services with schema-first navigation.
|
||||
---
|
||||
|
||||
import 'rapidoc/dist/rapidoc-min.js';
|
||||
|
||||
> The aggregate spec is composed from per-service OpenAPI files and namespaced by service (e.g., `/authority/...`). The bundled copy lives at `/api/stella.yaml` so offline builds stay self-contained.
|
||||
|
||||
<div class="version-select">
|
||||
@@ -46,17 +44,17 @@ import 'rapidoc/dist/rapidoc-min.js';
|
||||
<div class="copy-snippets">
|
||||
<div class="snippet">
|
||||
<header>Health check</header>
|
||||
<pre><code id="curl-health">curl -X GET https://api.stellaops.local/authority/health \\
|
||||
<pre><code id="curl-health">{`curl -X GET https://api.stellaops.local/authority/health \\
|
||||
-H 'Accept: application/json' \\
|
||||
-H 'User-Agent: stellaops-devportal/0.1.0'</code></pre>
|
||||
-H 'User-Agent: stellaops-devportal/0.1.0'`}</code></pre>
|
||||
<button data-copy="#curl-health">Copy</button>
|
||||
</div>
|
||||
<div class="snippet">
|
||||
<header>Submit orchestration job</header>
|
||||
<pre><code id="curl-orchestrator">curl -X POST https://api.stellaops.local/orchestrator/jobs \\
|
||||
<pre><code id="curl-orchestrator">{`curl -X POST https://api.stellaops.local/orchestrator/jobs \\
|
||||
-H 'Authorization: Bearer $STELLAOPS_TOKEN' \\
|
||||
-H 'Content-Type: application/json' \\
|
||||
-d '{\"workflow\":\"sbom-verify\",\"source\":\"registry:example/app@sha256:...\"}'</code></pre>
|
||||
-d '{"workflow":"sbom-verify","source":"registry:example/app@sha256:..."}'`}</code></pre>
|
||||
<button data-copy="#curl-orchestrator">Copy</button>
|
||||
</div>
|
||||
</div>
|
||||
@@ -66,32 +64,5 @@ import 'rapidoc/dist/rapidoc-min.js';
|
||||
- Shared schemas live under `#/components/schemas` with namespaced keys (use the **Schemas** panel).
|
||||
- Servers list includes one entry per service; sandbox URLs will be added alongside prod.
|
||||
|
||||
<script type="module">
|
||||
const selector = document.getElementById('spec-version');
|
||||
const rapidoc = document.getElementById('rapidoc');
|
||||
selector?.addEventListener('change', (evt) => {
|
||||
const url = evt.target.value;
|
||||
if (rapidoc) {
|
||||
rapidoc.setAttribute('spec-url', url);
|
||||
rapidoc.loadSpec(url);
|
||||
}
|
||||
});
|
||||
|
||||
document.querySelectorAll('button[data-copy]').forEach((btn) => {
|
||||
btn.addEventListener('click', async () => {
|
||||
const target = btn.getAttribute('data-copy');
|
||||
const el = target ? document.querySelector(target) : null;
|
||||
if (!el) return;
|
||||
const text = el.textContent || '';
|
||||
try {
|
||||
await navigator.clipboard.writeText(text);
|
||||
btn.textContent = 'Copied!';
|
||||
setTimeout(() => (btn.textContent = 'Copy'), 1200);
|
||||
} catch (err) {
|
||||
btn.textContent = 'Copy failed';
|
||||
setTimeout(() => (btn.textContent = 'Copy'), 1200);
|
||||
console.error(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
||||
<script src="/js/rapidoc-loader.js"></script>
|
||||
<script src="/js/api-reference.js"></script>
|
||||
|
||||
@@ -2,15 +2,12 @@
|
||||
title: Try-It Console
|
||||
description: Run authenticated requests against the sandbox API with scoped tokens and offline-ready tooling.
|
||||
---
|
||||
|
||||
import 'rapidoc/dist/rapidoc-min.js';
|
||||
|
||||
> Use this console to exercise the sandbox API. It runs fully client-side with no external assets. Supply a short-lived token with the scopes shown below. Nothing is sent to third-party services.
|
||||
|
||||
## Token onboarding
|
||||
- Obtain a sandbox token from the Platform sandbox issuer (`/auth/oidc/token`) using the `client_credentials` flow.
|
||||
- Required scopes (minimum): `stellaops.read`, `stellaops.write:sandbox`.
|
||||
- Tokens should be short-lived (<15 minutes); refresh before each session.
|
||||
- Tokens should be short-lived (<15 minutes); refresh before each session.
|
||||
- Paste only sandbox tokens here—**never** production credentials.
|
||||
|
||||
<div class="token-panel">
|
||||
@@ -60,28 +57,5 @@ import 'rapidoc/dist/rapidoc-min.js';
|
||||
- Use small payloads; responses are truncated by RapiDoc if excessively large.
|
||||
- Keep retries low to preserve determinism (default is none).
|
||||
|
||||
<script type="module">
|
||||
const tokenInput = document.getElementById('token-input');
|
||||
const applyBtn = document.getElementById('token-apply');
|
||||
const clearBtn = document.getElementById('token-clear');
|
||||
const doc = document.getElementById('sandbox-rapidoc');
|
||||
|
||||
const setToken = (value) => {
|
||||
if (!doc) return;
|
||||
const header = value ? `Bearer ${value.trim()}` : '';
|
||||
doc.setAttribute('api-key-value', header);
|
||||
doc.loadSpec(doc.getAttribute('spec-url'));
|
||||
};
|
||||
|
||||
applyBtn?.addEventListener('click', () => {
|
||||
const token = tokenInput?.value || '';
|
||||
setToken(token);
|
||||
applyBtn.textContent = 'Applied';
|
||||
setTimeout(() => (applyBtn.textContent = 'Apply to console'), 1200);
|
||||
});
|
||||
|
||||
clearBtn?.addEventListener('click', () => {
|
||||
if (tokenInput) tokenInput.value = '';
|
||||
setToken('');
|
||||
});
|
||||
</script>
|
||||
<script src="/js/rapidoc-loader.js"></script>
|
||||
<script src="/js/try-it-console.js"></script>
|
||||
|
||||
13
src/DevPortal/StellaOps.DevPortal.Site/src/logo.svg
Normal file
13
src/DevPortal/StellaOps.DevPortal.Site/src/logo.svg
Normal file
@@ -0,0 +1,13 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 200 200" role="img" aria-labelledby="title desc">
|
||||
<title id="title">StellaOps DevPortal</title>
|
||||
<desc id="desc">Stylised starburst mark for the StellaOps developer portal.</desc>
|
||||
<defs>
|
||||
<linearGradient id="g" x1="0%" x2="100%" y1="0%" y2="100%">
|
||||
<stop offset="0%" stop-color="#0ea5e9" />
|
||||
<stop offset="100%" stop-color="#22d3ee" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="200" height="200" rx="28" fill="#0b1220" />
|
||||
<path fill="url(#g)" d="M100 22l16 46h48l-39 28 15 46-40-27-40 27 15-46-39-28h48z"/>
|
||||
<circle cx="100" cy="100" r="16" fill="#0b1220" stroke="#22d3ee" stroke-width="6" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 679 B |
@@ -23,6 +23,36 @@ public record GraphSearchRequest
|
||||
public string? Cursor { get; init; }
|
||||
}
|
||||
|
||||
public record GraphQueryRequest
|
||||
{
|
||||
[JsonPropertyName("kinds")]
|
||||
public string[] Kinds { get; init; } = Array.Empty<string>();
|
||||
|
||||
[JsonPropertyName("query")]
|
||||
public string? Query { get; init; }
|
||||
|
||||
[JsonPropertyName("filters")]
|
||||
public Dictionary<string, object>? Filters { get; init; }
|
||||
|
||||
[JsonPropertyName("limit")]
|
||||
public int? Limit { get; init; }
|
||||
|
||||
[JsonPropertyName("cursor")]
|
||||
public string? Cursor { get; init; }
|
||||
|
||||
[JsonPropertyName("includeEdges")]
|
||||
public bool IncludeEdges { get; init; } = true;
|
||||
|
||||
[JsonPropertyName("includeStats")]
|
||||
public bool IncludeStats { get; init; } = true;
|
||||
|
||||
[JsonPropertyName("includeOverlays")]
|
||||
public bool IncludeOverlays { get; init; } = false;
|
||||
|
||||
[JsonPropertyName("budget")]
|
||||
public GraphQueryBudget? Budget { get; init; }
|
||||
}
|
||||
|
||||
public static class SearchValidator
|
||||
{
|
||||
public static string? Validate(GraphSearchRequest req)
|
||||
@@ -51,6 +81,234 @@ public static class SearchValidator
|
||||
}
|
||||
}
|
||||
|
||||
public static class QueryValidator
|
||||
{
|
||||
public static string? Validate(GraphQueryRequest req)
|
||||
{
|
||||
if (req.Kinds is null || req.Kinds.Length == 0)
|
||||
{
|
||||
return "kinds is required";
|
||||
}
|
||||
|
||||
if (req.Limit.HasValue && (req.Limit.Value <= 0 || req.Limit.Value > 500))
|
||||
{
|
||||
return "limit must be between 1 and 500";
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(req.Query) && (req.Filters is null || req.Filters.Count == 0) && string.IsNullOrWhiteSpace(req.Cursor))
|
||||
{
|
||||
return "query or filters or cursor must be provided";
|
||||
}
|
||||
|
||||
if (req.Budget is not null)
|
||||
{
|
||||
if (req.Budget.Tiles.HasValue && (req.Budget.Tiles < 1 || req.Budget.Tiles > 6000))
|
||||
{
|
||||
return "budget.tiles must be between 1 and 5000";
|
||||
}
|
||||
|
||||
if (req.Budget.Nodes.HasValue && req.Budget.Nodes < 1)
|
||||
{
|
||||
return "budget.nodes must be >= 1";
|
||||
}
|
||||
|
||||
if (req.Budget.Edges.HasValue && req.Budget.Edges < 1)
|
||||
{
|
||||
return "budget.edges must be >= 1";
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public record GraphExportRequest
|
||||
{
|
||||
[JsonPropertyName("format")]
|
||||
public string Format { get; init; } = "ndjson"; // ndjson, csv, graphml, png, svg
|
||||
|
||||
[JsonPropertyName("includeEdges")]
|
||||
public bool IncludeEdges { get; init; } = true;
|
||||
|
||||
[JsonPropertyName("snapshotId")]
|
||||
public string? SnapshotId { get; init; }
|
||||
|
||||
[JsonPropertyName("kinds")]
|
||||
public string[]? Kinds { get; init; }
|
||||
|
||||
[JsonPropertyName("query")]
|
||||
public string? Query { get; init; }
|
||||
|
||||
[JsonPropertyName("filters")]
|
||||
public Dictionary<string, object>? Filters { get; init; }
|
||||
}
|
||||
|
||||
public static class ExportValidator
|
||||
{
|
||||
private static readonly HashSet<string> SupportedFormats = new(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
"ndjson", "csv", "graphml", "png", "svg"
|
||||
};
|
||||
|
||||
public static string? Validate(GraphExportRequest req)
|
||||
{
|
||||
if (!SupportedFormats.Contains(req.Format))
|
||||
{
|
||||
return "format must be one of ndjson,csv,graphml,png,svg";
|
||||
}
|
||||
|
||||
if (req.Kinds is not null && req.Kinds.Length == 0)
|
||||
{
|
||||
return "kinds cannot be empty array";
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public record GraphPathRequest
|
||||
{
|
||||
[JsonPropertyName("sources")]
|
||||
public string[] Sources { get; init; } = Array.Empty<string>();
|
||||
|
||||
[JsonPropertyName("targets")]
|
||||
public string[] Targets { get; init; } = Array.Empty<string>();
|
||||
|
||||
[JsonPropertyName("kinds")]
|
||||
public string[] Kinds { get; init; } = Array.Empty<string>();
|
||||
|
||||
[JsonPropertyName("maxDepth")]
|
||||
public int? MaxDepth { get; init; }
|
||||
|
||||
[JsonPropertyName("filters")]
|
||||
public Dictionary<string, object>? Filters { get; init; }
|
||||
|
||||
[JsonPropertyName("includeOverlays")]
|
||||
public bool IncludeOverlays { get; init; } = false;
|
||||
|
||||
[JsonPropertyName("budget")]
|
||||
public GraphQueryBudget? Budget { get; init; }
|
||||
}
|
||||
|
||||
public static class PathValidator
|
||||
{
|
||||
public static string? Validate(GraphPathRequest req)
|
||||
{
|
||||
if (req.Sources is null || req.Sources.Length == 0)
|
||||
{
|
||||
return "sources is required";
|
||||
}
|
||||
|
||||
if (req.Targets is null || req.Targets.Length == 0)
|
||||
{
|
||||
return "targets is required";
|
||||
}
|
||||
|
||||
if (req.MaxDepth.HasValue && (req.MaxDepth.Value < 1 || req.MaxDepth.Value > 6))
|
||||
{
|
||||
return "maxDepth must be between 1 and 6";
|
||||
}
|
||||
|
||||
if (req.Budget is not null)
|
||||
{
|
||||
if (req.Budget.Tiles.HasValue && (req.Budget.Tiles < 1 || req.Budget.Tiles > 6000))
|
||||
{
|
||||
return "budget.tiles must be between 1 and 6000";
|
||||
}
|
||||
|
||||
if (req.Budget.Nodes.HasValue && req.Budget.Nodes < 1)
|
||||
{
|
||||
return "budget.nodes must be >= 1";
|
||||
}
|
||||
|
||||
if (req.Budget.Edges.HasValue && req.Budget.Edges < 1)
|
||||
{
|
||||
return "budget.edges must be >= 1";
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public record GraphDiffRequest
|
||||
{
|
||||
[JsonPropertyName("snapshotA")]
|
||||
public string SnapshotA { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("snapshotB")]
|
||||
public string SnapshotB { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("includeEdges")]
|
||||
public bool IncludeEdges { get; init; } = true;
|
||||
|
||||
[JsonPropertyName("includeStats")]
|
||||
public bool IncludeStats { get; init; } = true;
|
||||
|
||||
[JsonPropertyName("budget")]
|
||||
public GraphQueryBudget? Budget { get; init; }
|
||||
}
|
||||
|
||||
public static class DiffValidator
|
||||
{
|
||||
public static string? Validate(GraphDiffRequest req)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(req.SnapshotA))
|
||||
{
|
||||
return "snapshotA is required";
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(req.SnapshotB))
|
||||
{
|
||||
return "snapshotB is required";
|
||||
}
|
||||
|
||||
if (req.Budget is not null)
|
||||
{
|
||||
if (req.Budget.Tiles.HasValue && (req.Budget.Tiles < 1 || req.Budget.Tiles > 6000))
|
||||
{
|
||||
return "budget.tiles must be between 1 and 6000";
|
||||
}
|
||||
|
||||
if (req.Budget.Nodes.HasValue && req.Budget.Nodes < 1)
|
||||
{
|
||||
return "budget.nodes must be >= 1";
|
||||
}
|
||||
|
||||
if (req.Budget.Edges.HasValue && req.Budget.Edges < 1)
|
||||
{
|
||||
return "budget.edges must be >= 1";
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public record GraphQueryBudget
|
||||
{
|
||||
[JsonPropertyName("tiles")]
|
||||
public int? Tiles { get; init; }
|
||||
|
||||
[JsonPropertyName("nodes")]
|
||||
public int? Nodes { get; init; }
|
||||
|
||||
[JsonPropertyName("edges")]
|
||||
public int? Edges { get; init; }
|
||||
|
||||
public GraphQueryBudget ApplyDefaults()
|
||||
{
|
||||
return new GraphQueryBudget
|
||||
{
|
||||
Tiles = Tiles ?? 6000,
|
||||
Nodes = Nodes ?? 5000,
|
||||
Edges = Edges ?? 10000
|
||||
};
|
||||
}
|
||||
|
||||
public static GraphQueryBudget Default { get; } = new();
|
||||
}
|
||||
|
||||
public record CostBudget(int Limit, int Remaining, int Consumed);
|
||||
|
||||
public record NodeTile
|
||||
@@ -63,6 +321,22 @@ public record NodeTile
|
||||
public Dictionary<string, OverlayPayload>? Overlays { get; init; }
|
||||
}
|
||||
|
||||
public record EdgeTile
|
||||
{
|
||||
public string Id { get; init; } = string.Empty;
|
||||
public string Kind { get; init; } = "depends_on";
|
||||
public string Tenant { get; init; } = string.Empty;
|
||||
public string Source { get; init; } = string.Empty;
|
||||
public string Target { get; init; } = string.Empty;
|
||||
public Dictionary<string, object?> Attributes { get; init; } = new();
|
||||
}
|
||||
|
||||
public record StatsTile
|
||||
{
|
||||
public int Nodes { get; init; }
|
||||
public int Edges { get; init; }
|
||||
}
|
||||
|
||||
public record CursorTile(string Token, string ResumeUrl);
|
||||
|
||||
public record TileEnvelope(string Type, int Seq, object Data, CostBudget? Cost = null);
|
||||
@@ -76,3 +350,22 @@ public record ErrorResponse
|
||||
public object? Details { get; init; }
|
||||
public string? RequestId { get; init; }
|
||||
}
|
||||
|
||||
public record DiffTile
|
||||
{
|
||||
public string EntityType { get; init; } = string.Empty;
|
||||
public string ChangeType { get; init; } = string.Empty;
|
||||
public string Id { get; init; } = string.Empty;
|
||||
public object? Before { get; init; }
|
||||
public object? After { get; init; }
|
||||
}
|
||||
|
||||
public record DiffStatsTile
|
||||
{
|
||||
public int NodesAdded { get; init; }
|
||||
public int NodesRemoved { get; init; }
|
||||
public int NodesChanged { get; init; }
|
||||
public int EdgesAdded { get; init; }
|
||||
public int EdgesRemoved { get; init; }
|
||||
public int EdgesChanged { get; init; }
|
||||
}
|
||||
|
||||
19
src/Graph/StellaOps.Graph.Api/Deploy/HEALTH.md
Normal file
19
src/Graph/StellaOps.Graph.Api/Deploy/HEALTH.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# Graph API Deploy Health Checks
|
||||
|
||||
- **Readiness**: `GET /healthz` on port 8080
|
||||
- **Liveness**: `GET /healthz` on port 8080
|
||||
- Expected latency: < 200ms on local/dev.
|
||||
- Failing conditions:
|
||||
- Missing `X-Stella-Tenant` header on app routes returns 400 but healthz remains 200.
|
||||
- Rate limiting does not apply to `/healthz`.
|
||||
|
||||
Smoke test (once deployed):
|
||||
```bash
|
||||
curl -i http://localhost:8080/healthz
|
||||
curl -i -X POST http://localhost:8080/graph/search \
|
||||
-H "X-Stella-Tenant: demo" \
|
||||
-H "X-Stella-Scopes: graph:read graph:query" \
|
||||
-H "Authorization: bearer demo" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"kinds":["component"],"query":"pkg:"}'
|
||||
```
|
||||
18
src/Graph/StellaOps.Graph.Api/Deploy/docker-compose.yaml
Normal file
18
src/Graph/StellaOps.Graph.Api/Deploy/docker-compose.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
graph-api:
|
||||
image: stellaops/graph-api:latest
|
||||
container_name: stellaops-graph-api
|
||||
environment:
|
||||
ASPNETCORE_URLS: "http://0.0.0.0:8080"
|
||||
STELLAOPS_GRAPH_SNAPSHOT_DIR: "/data/snapshots"
|
||||
ports:
|
||||
- "8080:8080"
|
||||
volumes:
|
||||
- ./data/snapshots:/data/snapshots
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8080/healthz"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
85
src/Graph/StellaOps.Graph.Api/Deploy/kubernetes.yaml
Normal file
85
src/Graph/StellaOps.Graph.Api/Deploy/kubernetes.yaml
Normal file
@@ -0,0 +1,85 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: stellaops-graph-api
|
||||
labels:
|
||||
app: stellaops-graph-api
|
||||
spec:
|
||||
replicas: 2
|
||||
selector:
|
||||
matchLabels:
|
||||
app: stellaops-graph-api
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: stellaops-graph-api
|
||||
spec:
|
||||
containers:
|
||||
- name: graph-api
|
||||
image: stellaops/graph-api:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: ASPNETCORE_URLS
|
||||
value: http://0.0.0.0:8080
|
||||
- name: STELLAOPS_GRAPH_SNAPSHOT_DIR
|
||||
value: /var/lib/stellaops/graph/snapshots
|
||||
ports:
|
||||
- containerPort: 8080
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /healthz
|
||||
port: 8080
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 10
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /healthz
|
||||
port: 8080
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 20
|
||||
resources:
|
||||
requests:
|
||||
cpu: 200m
|
||||
memory: 256Mi
|
||||
limits:
|
||||
cpu: 500m
|
||||
memory: 512Mi
|
||||
volumeMounts:
|
||||
- name: snapshots
|
||||
mountPath: /var/lib/stellaops/graph/snapshots
|
||||
volumes:
|
||||
- name: snapshots
|
||||
emptyDir: {}
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: stellaops-graph-api
|
||||
labels:
|
||||
app: stellaops-graph-api
|
||||
spec:
|
||||
selector:
|
||||
app: stellaops-graph-api
|
||||
ports:
|
||||
- name: http
|
||||
protocol: TCP
|
||||
port: 80
|
||||
targetPort: 8080
|
||||
---
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: stellaops-graph-api
|
||||
annotations:
|
||||
nginx.ingress.kubernetes.io/proxy-body-size: "25m"
|
||||
spec:
|
||||
rules:
|
||||
- http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: stellaops-graph-api
|
||||
port:
|
||||
number: 80
|
||||
@@ -3,14 +3,24 @@ using StellaOps.Graph.Api.Services;
|
||||
|
||||
var builder = WebApplication.CreateBuilder(args);
|
||||
|
||||
builder.Services.AddMemoryCache();
|
||||
builder.Services.AddSingleton<InMemoryGraphRepository>();
|
||||
builder.Services.AddSingleton<IGraphSearchService, InMemoryGraphSearchService>();
|
||||
builder.Services.AddScoped<IGraphSearchService, InMemoryGraphSearchService>();
|
||||
builder.Services.AddScoped<IGraphQueryService, InMemoryGraphQueryService>();
|
||||
builder.Services.AddScoped<IGraphPathService, InMemoryGraphPathService>();
|
||||
builder.Services.AddScoped<IGraphDiffService, InMemoryGraphDiffService>();
|
||||
builder.Services.AddScoped<IOverlayService, InMemoryOverlayService>();
|
||||
builder.Services.AddScoped<IGraphExportService, InMemoryGraphExportService>();
|
||||
builder.Services.AddSingleton<IRateLimiter>(_ => new RateLimiterService(limitPerWindow: 120));
|
||||
builder.Services.AddSingleton<IAuditLogger, InMemoryAuditLogger>();
|
||||
builder.Services.AddSingleton<IGraphMetrics, GraphMetrics>();
|
||||
var app = builder.Build();
|
||||
|
||||
app.UseRouting();
|
||||
|
||||
app.MapPost("/graph/search", async (HttpContext context, GraphSearchRequest request, IGraphSearchService service, CancellationToken ct) =>
|
||||
{
|
||||
var sw = System.Diagnostics.Stopwatch.StartNew();
|
||||
context.Response.ContentType = "application/x-ndjson";
|
||||
var tenant = context.Request.Headers["X-Stella-Tenant"].FirstOrDefault();
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
@@ -25,10 +35,28 @@ app.MapPost("/graph/search", async (HttpContext context, GraphSearchRequest requ
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!RateLimit(context, "/graph/search"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status429TooManyRequests, "GRAPH_RATE_LIMITED", "Too many requests", ct);
|
||||
LogAudit(context, "/graph/search", StatusCodes.Status429TooManyRequests, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var scopes = context.Request.Headers["X-Stella-Scopes"]
|
||||
.SelectMany(v => v.Split(new[] { ' ', ',', ';' }, StringSplitOptions.RemoveEmptyEntries))
|
||||
.ToHashSet(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (!scopes.Contains("graph:read") && !scopes.Contains("graph:query"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status403Forbidden, "GRAPH_FORBIDDEN", "Missing graph:read or graph:query scope", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var validation = SearchValidator.Validate(request);
|
||||
if (validation is not null)
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", validation, ct);
|
||||
LogAudit(context, "/graph/search", StatusCodes.Status400BadRequest, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
@@ -38,10 +66,242 @@ app.MapPost("/graph/search", async (HttpContext context, GraphSearchRequest requ
|
||||
await context.Response.WriteAsync("\n", ct);
|
||||
await context.Response.Body.FlushAsync(ct);
|
||||
}
|
||||
LogAudit(context, "/graph/search", StatusCodes.Status200OK, sw.ElapsedMilliseconds);
|
||||
|
||||
return Results.Empty;
|
||||
});
|
||||
|
||||
app.MapPost("/graph/query", async (HttpContext context, GraphQueryRequest request, IGraphQueryService service, CancellationToken ct) =>
|
||||
{
|
||||
var sw = System.Diagnostics.Stopwatch.StartNew();
|
||||
context.Response.ContentType = "application/x-ndjson";
|
||||
var tenant = context.Request.Headers["X-Stella-Tenant"].FirstOrDefault();
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", "Missing X-Stella-Tenant header", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!context.Request.Headers.ContainsKey("Authorization"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status401Unauthorized, "GRAPH_UNAUTHORIZED", "Missing Authorization header", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!RateLimit(context, "/graph/query"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status429TooManyRequests, "GRAPH_RATE_LIMITED", "Too many requests", ct);
|
||||
LogAudit(context, "/graph/query", StatusCodes.Status429TooManyRequests, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var scopes = context.Request.Headers["X-Stella-Scopes"]
|
||||
.SelectMany(v => v.Split(new[] { ' ', ',', ';' }, StringSplitOptions.RemoveEmptyEntries))
|
||||
.ToHashSet(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (!scopes.Contains("graph:query"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status403Forbidden, "GRAPH_FORBIDDEN", "Missing graph:query scope", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var validation = QueryValidator.Validate(request);
|
||||
if (validation is not null)
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", validation, ct);
|
||||
LogAudit(context, "/graph/query", StatusCodes.Status400BadRequest, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
await foreach (var line in service.QueryAsync(tenant!, request, ct))
|
||||
{
|
||||
await context.Response.WriteAsync(line, ct);
|
||||
await context.Response.WriteAsync("\n", ct);
|
||||
await context.Response.Body.FlushAsync(ct);
|
||||
}
|
||||
LogAudit(context, "/graph/query", StatusCodes.Status200OK, sw.ElapsedMilliseconds);
|
||||
|
||||
return Results.Empty;
|
||||
});
|
||||
|
||||
app.MapPost("/graph/paths", async (HttpContext context, GraphPathRequest request, IGraphPathService service, CancellationToken ct) =>
|
||||
{
|
||||
var sw = System.Diagnostics.Stopwatch.StartNew();
|
||||
context.Response.ContentType = "application/x-ndjson";
|
||||
var tenant = context.Request.Headers["X-Stella-Tenant"].FirstOrDefault();
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", "Missing X-Stella-Tenant header", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!context.Request.Headers.ContainsKey("Authorization"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status401Unauthorized, "GRAPH_UNAUTHORIZED", "Missing Authorization header", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!RateLimit(context, "/graph/paths"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status429TooManyRequests, "GRAPH_RATE_LIMITED", "Too many requests", ct);
|
||||
LogAudit(context, "/graph/paths", StatusCodes.Status429TooManyRequests, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var scopes = context.Request.Headers["X-Stella-Scopes"]
|
||||
.SelectMany(v => v.Split(new[] { ' ', ',', ';' }, StringSplitOptions.RemoveEmptyEntries))
|
||||
.ToHashSet(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (!scopes.Contains("graph:query"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status403Forbidden, "GRAPH_FORBIDDEN", "Missing graph:query scope", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var validation = PathValidator.Validate(request);
|
||||
if (validation is not null)
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", validation, ct);
|
||||
LogAudit(context, "/graph/paths", StatusCodes.Status400BadRequest, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
await foreach (var line in service.FindPathsAsync(tenant!, request, ct))
|
||||
{
|
||||
await context.Response.WriteAsync(line, ct);
|
||||
await context.Response.WriteAsync("\n", ct);
|
||||
await context.Response.Body.FlushAsync(ct);
|
||||
}
|
||||
LogAudit(context, "/graph/paths", StatusCodes.Status200OK, sw.ElapsedMilliseconds);
|
||||
|
||||
return Results.Empty;
|
||||
});
|
||||
|
||||
app.MapPost("/graph/diff", async (HttpContext context, GraphDiffRequest request, IGraphDiffService service, CancellationToken ct) =>
|
||||
{
|
||||
var sw = System.Diagnostics.Stopwatch.StartNew();
|
||||
context.Response.ContentType = "application/x-ndjson";
|
||||
var tenant = context.Request.Headers["X-Stella-Tenant"].FirstOrDefault();
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", "Missing X-Stella-Tenant header", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!context.Request.Headers.ContainsKey("Authorization"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status401Unauthorized, "GRAPH_UNAUTHORIZED", "Missing Authorization header", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!RateLimit(context, "/graph/diff"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status429TooManyRequests, "GRAPH_RATE_LIMITED", "Too many requests", ct);
|
||||
LogAudit(context, "/graph/diff", StatusCodes.Status429TooManyRequests, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var scopes = context.Request.Headers["X-Stella-Scopes"]
|
||||
.SelectMany(v => v.Split(new[] { ' ', ',', ';' }, StringSplitOptions.RemoveEmptyEntries))
|
||||
.ToHashSet(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (!scopes.Contains("graph:query"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status403Forbidden, "GRAPH_FORBIDDEN", "Missing graph:query scope", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var validation = DiffValidator.Validate(request);
|
||||
if (validation is not null)
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", validation, ct);
|
||||
LogAudit(context, "/graph/diff", StatusCodes.Status400BadRequest, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
await foreach (var line in service.DiffAsync(tenant!, request, ct))
|
||||
{
|
||||
await context.Response.WriteAsync(line, ct);
|
||||
await context.Response.WriteAsync("\n", ct);
|
||||
await context.Response.Body.FlushAsync(ct);
|
||||
}
|
||||
LogAudit(context, "/graph/diff", StatusCodes.Status200OK, sw.ElapsedMilliseconds);
|
||||
|
||||
return Results.Empty;
|
||||
});
|
||||
|
||||
app.MapPost("/graph/export", async (HttpContext context, GraphExportRequest request, IGraphExportService service, CancellationToken ct) =>
|
||||
{
|
||||
var sw = System.Diagnostics.Stopwatch.StartNew();
|
||||
var tenant = context.Request.Headers["X-Stella-Tenant"].FirstOrDefault();
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", "Missing X-Stella-Tenant header", ct);
|
||||
LogAudit(context, "/graph/export", StatusCodes.Status400BadRequest, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!context.Request.Headers.ContainsKey("Authorization"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status401Unauthorized, "GRAPH_UNAUTHORIZED", "Missing Authorization header", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var scopes = context.Request.Headers["X-Stella-Scopes"]
|
||||
.SelectMany(v => v.Split(new[] { ' ', ',', ';' }, StringSplitOptions.RemoveEmptyEntries))
|
||||
.ToHashSet(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (!scopes.Contains("graph:export"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status403Forbidden, "GRAPH_FORBIDDEN", "Missing graph:export scope", ct);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
if (!RateLimit(context, "/graph/export"))
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status429TooManyRequests, "GRAPH_RATE_LIMITED", "Too many requests", ct);
|
||||
LogAudit(context, "/graph/export", StatusCodes.Status429TooManyRequests, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var validation = ExportValidator.Validate(request);
|
||||
if (validation is not null)
|
||||
{
|
||||
await WriteError(context, StatusCodes.Status400BadRequest, "GRAPH_VALIDATION_FAILED", validation, ct);
|
||||
LogAudit(context, "/graph/export", StatusCodes.Status400BadRequest, sw.ElapsedMilliseconds);
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
var job = await service.StartExportAsync(tenant!, request, ct);
|
||||
var manifest = new
|
||||
{
|
||||
jobId = job.JobId,
|
||||
status = "completed",
|
||||
format = job.Format,
|
||||
sha256 = job.Sha256,
|
||||
size = job.SizeBytes,
|
||||
downloadUrl = $"/graph/export/{job.JobId}",
|
||||
completedAt = job.CompletedAt
|
||||
};
|
||||
LogAudit(context, "/graph/export", StatusCodes.Status200OK, sw.ElapsedMilliseconds);
|
||||
return Results.Ok(manifest);
|
||||
});
|
||||
|
||||
app.MapGet("/graph/export/{jobId}", (string jobId, HttpContext context, IGraphExportService service) =>
|
||||
{
|
||||
var job = service.Get(jobId);
|
||||
if (job is null)
|
||||
{
|
||||
return Results.NotFound(new ErrorResponse { Error = "GRAPH_EXPORT_NOT_FOUND", Message = "Export job not found" });
|
||||
}
|
||||
|
||||
context.Response.Headers.ContentLength = job.Payload.Length;
|
||||
context.Response.Headers["X-Content-SHA256"] = job.Sha256;
|
||||
return Results.File(job.Payload, job.ContentType, $"graph-export-{job.JobId}.{job.Format}");
|
||||
});
|
||||
|
||||
app.MapGet("/healthz", () => Results.Ok(new { status = "ok" }));
|
||||
|
||||
app.Run();
|
||||
|
||||
static async Task WriteError(HttpContext ctx, int status, string code, string message, CancellationToken ct)
|
||||
@@ -54,3 +314,30 @@ static async Task WriteError(HttpContext ctx, int status, string code, string me
|
||||
});
|
||||
await ctx.Response.WriteAsync(payload + "\n", ct);
|
||||
}
|
||||
|
||||
static bool RateLimit(HttpContext ctx, string route)
|
||||
{
|
||||
var limiter = ctx.RequestServices.GetRequiredService<IRateLimiter>();
|
||||
var tenant = ctx.Request.Headers["X-Stella-Tenant"].FirstOrDefault() ?? "unknown";
|
||||
return limiter.Allow(tenant, route);
|
||||
}
|
||||
|
||||
static void LogAudit(HttpContext ctx, string route, int statusCode, long durationMs)
|
||||
{
|
||||
var logger = ctx.RequestServices.GetRequiredService<IAuditLogger>();
|
||||
var tenant = ctx.Request.Headers["X-Stella-Tenant"].FirstOrDefault() ?? "unknown";
|
||||
var actor = ctx.Request.Headers["Authorization"].FirstOrDefault() ?? "anonymous";
|
||||
var scopes = ctx.Request.Headers["X-Stella-Scopes"]
|
||||
.SelectMany(v => v.Split(new[] { ' ', ',', ';' }, StringSplitOptions.RemoveEmptyEntries))
|
||||
.ToArray();
|
||||
|
||||
logger.Log(new AuditEvent(
|
||||
Timestamp: DateTimeOffset.UtcNow,
|
||||
Tenant: tenant,
|
||||
Route: route,
|
||||
Method: ctx.Request.Method,
|
||||
Actor: actor,
|
||||
Scopes: scopes,
|
||||
StatusCode: statusCode,
|
||||
DurationMs: durationMs));
|
||||
}
|
||||
|
||||
40
src/Graph/StellaOps.Graph.Api/Services/GraphMetrics.cs
Normal file
40
src/Graph/StellaOps.Graph.Api/Services/GraphMetrics.cs
Normal file
@@ -0,0 +1,40 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public interface IGraphMetrics : IDisposable
|
||||
{
|
||||
Counter<long> BudgetDenied { get; }
|
||||
Histogram<double> QueryLatencySeconds { get; }
|
||||
Counter<long> OverlayCacheHit { get; }
|
||||
Counter<long> OverlayCacheMiss { get; }
|
||||
Histogram<double> ExportLatencySeconds { get; }
|
||||
Meter Meter { get; }
|
||||
}
|
||||
|
||||
public sealed class GraphMetrics : IGraphMetrics
|
||||
{
|
||||
private readonly Meter _meter;
|
||||
|
||||
public GraphMetrics()
|
||||
{
|
||||
_meter = new Meter("StellaOps.Graph.Api", "1.0.0");
|
||||
BudgetDenied = _meter.CreateCounter<long>("graph_query_budget_denied_total");
|
||||
QueryLatencySeconds = _meter.CreateHistogram<double>("graph_tile_latency_seconds", unit: "s");
|
||||
OverlayCacheHit = _meter.CreateCounter<long>("graph_overlay_cache_hits_total");
|
||||
OverlayCacheMiss = _meter.CreateCounter<long>("graph_overlay_cache_misses_total");
|
||||
ExportLatencySeconds = _meter.CreateHistogram<double>("graph_export_latency_seconds", unit: "s");
|
||||
}
|
||||
|
||||
public Counter<long> BudgetDenied { get; }
|
||||
public Histogram<double> QueryLatencySeconds { get; }
|
||||
public Counter<long> OverlayCacheHit { get; }
|
||||
public Counter<long> OverlayCacheMiss { get; }
|
||||
public Histogram<double> ExportLatencySeconds { get; }
|
||||
public Meter Meter => _meter;
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
_meter.Dispose();
|
||||
}
|
||||
}
|
||||
44
src/Graph/StellaOps.Graph.Api/Services/IAuditLogger.cs
Normal file
44
src/Graph/StellaOps.Graph.Api/Services/IAuditLogger.cs
Normal file
@@ -0,0 +1,44 @@
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public record AuditEvent(
|
||||
DateTimeOffset Timestamp,
|
||||
string Tenant,
|
||||
string Route,
|
||||
string Method,
|
||||
string Actor,
|
||||
string[] Scopes,
|
||||
int StatusCode,
|
||||
long DurationMs);
|
||||
|
||||
public interface IAuditLogger
|
||||
{
|
||||
void Log(AuditEvent evt);
|
||||
IReadOnlyList<AuditEvent> GetRecent(int max = 100);
|
||||
}
|
||||
|
||||
public sealed class InMemoryAuditLogger : IAuditLogger
|
||||
{
|
||||
private readonly LinkedList<AuditEvent> _events = new();
|
||||
private readonly object _lock = new();
|
||||
|
||||
public void Log(AuditEvent evt)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
_events.AddFirst(evt);
|
||||
while (_events.Count > 500)
|
||||
{
|
||||
_events.RemoveLast();
|
||||
}
|
||||
}
|
||||
Console.WriteLine($"[AUDIT] {evt.Timestamp:O} tenant={evt.Tenant} route={evt.Route} status={evt.StatusCode} scopes={string.Join(' ', evt.Scopes)} duration_ms={evt.DurationMs}");
|
||||
}
|
||||
|
||||
public IReadOnlyList<AuditEvent> GetRecent(int max = 100)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
return _events.Take(max).ToList();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public interface IGraphDiffService
|
||||
{
|
||||
IAsyncEnumerable<string> DiffAsync(string tenant, GraphDiffRequest request, CancellationToken ct = default);
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public record GraphExportJob(string JobId, string Tenant, string Format, string ContentType, byte[] Payload, string Sha256, long SizeBytes, DateTimeOffset CompletedAt);
|
||||
|
||||
public interface IGraphExportService
|
||||
{
|
||||
Task<GraphExportJob> StartExportAsync(string tenant, GraphExportRequest request, CancellationToken ct = default);
|
||||
GraphExportJob? Get(string jobId);
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public interface IGraphPathService
|
||||
{
|
||||
IAsyncEnumerable<string> FindPathsAsync(string tenant, GraphPathRequest request, CancellationToken ct = default);
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public interface IGraphQueryService
|
||||
{
|
||||
IAsyncEnumerable<string> QueryAsync(string tenant, GraphQueryRequest request, CancellationToken ct = default);
|
||||
}
|
||||
12
src/Graph/StellaOps.Graph.Api/Services/IOverlayService.cs
Normal file
12
src/Graph/StellaOps.Graph.Api/Services/IOverlayService.cs
Normal file
@@ -0,0 +1,12 @@
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public interface IOverlayService
|
||||
{
|
||||
Task<IDictionary<string, Dictionary<string, OverlayPayload>>> GetOverlaysAsync(
|
||||
string tenant,
|
||||
IEnumerable<string> nodeIds,
|
||||
bool sampleExplain,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
@@ -0,0 +1,166 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public sealed class InMemoryGraphDiffService : IGraphDiffService
|
||||
{
|
||||
private readonly InMemoryGraphRepository _repository;
|
||||
private static readonly JsonSerializerOptions Options = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
public InMemoryGraphDiffService(InMemoryGraphRepository repository)
|
||||
{
|
||||
_repository = repository;
|
||||
}
|
||||
|
||||
public async IAsyncEnumerable<string> DiffAsync(string tenant, GraphDiffRequest request, [EnumeratorCancellation] CancellationToken ct = default)
|
||||
{
|
||||
var budget = (request.Budget?.ApplyDefaults()) ?? GraphQueryBudget.Default.ApplyDefaults();
|
||||
var tileBudgetLimit = Math.Clamp(budget.Tiles ?? 6000, 1, 6000);
|
||||
var nodeBudgetRemaining = budget.Nodes ?? 5000;
|
||||
var edgeBudgetRemaining = budget.Edges ?? 10000;
|
||||
var budgetRemaining = tileBudgetLimit;
|
||||
var seq = 0;
|
||||
|
||||
var snapA = _repository.GetSnapshot(tenant, request.SnapshotA);
|
||||
var snapB = _repository.GetSnapshot(tenant, request.SnapshotB);
|
||||
|
||||
if (snapA is null || snapB is null)
|
||||
{
|
||||
var error = new ErrorResponse
|
||||
{
|
||||
Error = "GRAPH_SNAPSHOT_NOT_FOUND",
|
||||
Message = "One or both snapshots are missing.",
|
||||
Details = new { request.SnapshotA, request.SnapshotB }
|
||||
};
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("error", seq++, error, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
yield break;
|
||||
}
|
||||
|
||||
var nodesA = snapA.Value.Nodes.ToDictionary(n => n.Id, StringComparer.Ordinal);
|
||||
var nodesB = snapB.Value.Nodes.ToDictionary(n => n.Id, StringComparer.Ordinal);
|
||||
var edgesA = snapA.Value.Edges.ToDictionary(e => e.Id, StringComparer.Ordinal);
|
||||
var edgesB = snapB.Value.Edges.ToDictionary(e => e.Id, StringComparer.Ordinal);
|
||||
|
||||
foreach (var added in nodesB.Values.Where(n => !nodesA.ContainsKey(n.Id)).OrderBy(n => n.Id, StringComparer.Ordinal))
|
||||
{
|
||||
if (!Spend(ref budgetRemaining, ref nodeBudgetRemaining, tileBudgetLimit, seq, out var tile)) { yield return tile!; yield break; }
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("node_added", seq++, added, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
}
|
||||
|
||||
foreach (var removed in nodesA.Values.Where(n => !nodesB.ContainsKey(n.Id)).OrderBy(n => n.Id, StringComparer.Ordinal))
|
||||
{
|
||||
if (!Spend(ref budgetRemaining, ref nodeBudgetRemaining, tileBudgetLimit, seq, out var tile)) { yield return tile!; yield break; }
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("node_removed", seq++, removed, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
}
|
||||
|
||||
foreach (var common in nodesA.Keys.Intersect(nodesB.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
|
||||
{
|
||||
var a = nodesA[common];
|
||||
var b = nodesB[common];
|
||||
if (!AttributesEqual(a.Attributes, b.Attributes))
|
||||
{
|
||||
if (!Spend(ref budgetRemaining, ref nodeBudgetRemaining, tileBudgetLimit, seq, out var tile)) { yield return tile!; yield break; }
|
||||
var diff = new DiffTile
|
||||
{
|
||||
EntityType = "node",
|
||||
ChangeType = "changed",
|
||||
Id = common,
|
||||
Before = a,
|
||||
After = b
|
||||
};
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("node_changed", seq++, diff, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
}
|
||||
}
|
||||
|
||||
if (request.IncludeEdges)
|
||||
{
|
||||
foreach (var added in edgesB.Values.Where(e => !edgesA.ContainsKey(e.Id)).OrderBy(e => e.Id, StringComparer.Ordinal))
|
||||
{
|
||||
if (!Spend(ref budgetRemaining, ref edgeBudgetRemaining, tileBudgetLimit, seq, out var tile)) { yield return tile!; yield break; }
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("edge_added", seq++, added, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
}
|
||||
|
||||
foreach (var removed in edgesA.Values.Where(e => !edgesB.ContainsKey(e.Id)).OrderBy(e => e.Id, StringComparer.Ordinal))
|
||||
{
|
||||
if (!Spend(ref budgetRemaining, ref edgeBudgetRemaining, tileBudgetLimit, seq, out var tile)) { yield return tile!; yield break; }
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("edge_removed", seq++, removed, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
}
|
||||
|
||||
foreach (var common in edgesA.Keys.Intersect(edgesB.Keys, StringComparer.Ordinal).OrderBy(k => k, StringComparer.Ordinal))
|
||||
{
|
||||
var a = edgesA[common];
|
||||
var b = edgesB[common];
|
||||
if (!AttributesEqual(a.Attributes, b.Attributes))
|
||||
{
|
||||
if (!Spend(ref budgetRemaining, ref edgeBudgetRemaining, tileBudgetLimit, seq, out var tile)) { yield return tile!; yield break; }
|
||||
var diff = new DiffTile
|
||||
{
|
||||
EntityType = "edge",
|
||||
ChangeType = "changed",
|
||||
Id = common,
|
||||
Before = a,
|
||||
After = b
|
||||
};
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("edge_changed", seq++, diff, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (request.IncludeStats && budgetRemaining > 0)
|
||||
{
|
||||
var stats = new DiffStatsTile
|
||||
{
|
||||
NodesAdded = nodesB.Count(n => !nodesA.ContainsKey(n.Key)),
|
||||
NodesRemoved = nodesA.Count(n => !nodesB.ContainsKey(n.Key)),
|
||||
NodesChanged = nodesA.Keys.Intersect(nodesB.Keys, StringComparer.Ordinal).Count(id => !AttributesEqual(nodesA[id].Attributes, nodesB[id].Attributes)),
|
||||
EdgesAdded = request.IncludeEdges ? edgesB.Count(e => !edgesA.ContainsKey(e.Key)) : 0,
|
||||
EdgesRemoved = request.IncludeEdges ? edgesA.Count(e => !edgesB.ContainsKey(e.Key)) : 0,
|
||||
EdgesChanged = request.IncludeEdges ? edgesA.Keys.Intersect(edgesB.Keys, StringComparer.Ordinal).Count(id => !AttributesEqual(edgesA[id].Attributes, edgesB[id].Attributes)) : 0
|
||||
};
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("stats", seq++, stats, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
}
|
||||
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
|
||||
private static bool Spend(ref int budgetRemaining, ref int entityBudget, int limit, int seq, out string? tile)
|
||||
{
|
||||
if (budgetRemaining <= 0 || entityBudget <= 0)
|
||||
{
|
||||
tile = JsonSerializer.Serialize(new TileEnvelope("error", seq, new ErrorResponse
|
||||
{
|
||||
Error = "GRAPH_BUDGET_EXCEEDED",
|
||||
Message = "Diff exceeded budget."
|
||||
}, Cost(limit, budgetRemaining)), Options);
|
||||
return false;
|
||||
}
|
||||
|
||||
budgetRemaining--;
|
||||
entityBudget--;
|
||||
tile = null;
|
||||
return true;
|
||||
}
|
||||
|
||||
private static bool AttributesEqual(IDictionary<string, object?> a, IDictionary<string, object?> b)
|
||||
{
|
||||
if (a.Count != b.Count) return false;
|
||||
foreach (var kvp in a)
|
||||
{
|
||||
if (!b.TryGetValue(kvp.Key, out var other)) return false;
|
||||
if (!(kvp.Value?.ToString() ?? string.Empty).Equals(other?.ToString() ?? string.Empty, StringComparison.Ordinal))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private static CostBudget Cost(int limit, int remaining) =>
|
||||
new(limit, remaining - 1, limit - (remaining - 1));
|
||||
}
|
||||
@@ -0,0 +1,151 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Xml.Linq;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public sealed class InMemoryGraphExportService : IGraphExportService
|
||||
{
|
||||
private readonly InMemoryGraphRepository _repository;
|
||||
private readonly IGraphMetrics _metrics;
|
||||
private readonly Dictionary<string, GraphExportJob> _jobs = new(StringComparer.Ordinal);
|
||||
|
||||
public InMemoryGraphExportService(InMemoryGraphRepository repository, IGraphMetrics metrics)
|
||||
{
|
||||
_repository = repository;
|
||||
_metrics = metrics;
|
||||
}
|
||||
|
||||
public async Task<GraphExportJob> StartExportAsync(string tenant, GraphExportRequest request, CancellationToken ct = default)
|
||||
{
|
||||
// For now exports complete synchronously; job model kept for future async workers.
|
||||
var sw = System.Diagnostics.Stopwatch.StartNew();
|
||||
var (nodes, edges) = ResolveGraph(tenant, request);
|
||||
var (payload, contentType) = request.Format.ToLowerInvariant() switch
|
||||
{
|
||||
"ndjson" => (ExportNdjson(nodes, edges, request.IncludeEdges), "application/x-ndjson"),
|
||||
"csv" => (ExportCsv(nodes, edges, request.IncludeEdges), "text/csv"),
|
||||
"graphml" => (ExportGraphml(nodes, edges, request.IncludeEdges), "application/graphml+xml"),
|
||||
"png" => (ExportPlaceholder("png"), "image/png"),
|
||||
"svg" => (ExportPlaceholder("svg"), "image/svg+xml"),
|
||||
_ => (ExportNdjson(nodes, edges, request.IncludeEdges), "application/x-ndjson")
|
||||
};
|
||||
|
||||
var sha = ComputeSha256(payload);
|
||||
var jobId = $"job-{Guid.NewGuid():N}";
|
||||
var job = new GraphExportJob(jobId, tenant, request.Format, contentType, payload, sha, payload.Length, DateTimeOffset.UtcNow);
|
||||
_jobs[jobId] = job;
|
||||
sw.Stop();
|
||||
_metrics.ExportLatencySeconds.Record(sw.Elapsed.TotalSeconds, new KeyValuePair<string, object?>("format", request.Format));
|
||||
await Task.CompletedTask;
|
||||
return job;
|
||||
}
|
||||
|
||||
public GraphExportJob? Get(string jobId)
|
||||
{
|
||||
_jobs.TryGetValue(jobId, out var job);
|
||||
return job;
|
||||
}
|
||||
|
||||
private (IReadOnlyList<NodeTile> Nodes, IReadOnlyList<EdgeTile> Edges) ResolveGraph(string tenant, GraphExportRequest request)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(request.SnapshotId))
|
||||
{
|
||||
var snap = _repository.GetSnapshot(tenant, request.SnapshotId!);
|
||||
if (snap is not null) return snap.Value;
|
||||
}
|
||||
|
||||
var graphReq = new GraphQueryRequest
|
||||
{
|
||||
Kinds = request.Kinds ?? Array.Empty<string>(),
|
||||
Query = request.Query,
|
||||
Filters = request.Filters,
|
||||
IncludeEdges = request.IncludeEdges,
|
||||
Limit = 5000 // bounded export for in-memory demo
|
||||
};
|
||||
var (nodes, edges) = _repository.QueryGraph(tenant, graphReq);
|
||||
return (nodes, edges);
|
||||
}
|
||||
|
||||
private static byte[] ExportNdjson(IReadOnlyList<NodeTile> nodes, IReadOnlyList<EdgeTile> edges, bool includeEdges)
|
||||
{
|
||||
var lines = new List<string>(nodes.Count + (includeEdges ? edges.Count : 0));
|
||||
foreach (var n in nodes.OrderBy(n => n.Id, StringComparer.Ordinal))
|
||||
{
|
||||
lines.Add(System.Text.Json.JsonSerializer.Serialize(new { type = "node", data = n }, GraphQueryJson.Options));
|
||||
}
|
||||
if (includeEdges)
|
||||
{
|
||||
foreach (var e in edges.OrderBy(e => e.Id, StringComparer.Ordinal))
|
||||
{
|
||||
lines.Add(System.Text.Json.JsonSerializer.Serialize(new { type = "edge", data = e }, GraphQueryJson.Options));
|
||||
}
|
||||
}
|
||||
return Encoding.UTF8.GetBytes(string.Join("\n", lines));
|
||||
}
|
||||
|
||||
private static byte[] ExportCsv(IReadOnlyList<NodeTile> nodes, IReadOnlyList<EdgeTile> edges, bool includeEdges)
|
||||
{
|
||||
var sb = new StringBuilder();
|
||||
sb.AppendLine("type,id,kind,tenant,source,target");
|
||||
foreach (var n in nodes.OrderBy(n => n.Id, StringComparer.Ordinal))
|
||||
{
|
||||
sb.AppendLine($"node,\"{n.Id}\",{n.Kind},{n.Tenant},,");
|
||||
}
|
||||
if (includeEdges)
|
||||
{
|
||||
foreach (var e in edges.OrderBy(e => e.Id, StringComparer.Ordinal))
|
||||
{
|
||||
sb.AppendLine($"edge,\"{e.Id}\",{e.Kind},{e.Tenant},\"{e.Source}\",\"{e.Target}\"");
|
||||
}
|
||||
}
|
||||
return Encoding.UTF8.GetBytes(sb.ToString());
|
||||
}
|
||||
|
||||
private static byte[] ExportGraphml(IReadOnlyList<NodeTile> nodes, IReadOnlyList<EdgeTile> edges, bool includeEdges)
|
||||
{
|
||||
XNamespace ns = "http://graphml.graphdrawing.org/xmlns";
|
||||
var g = new XElement(ns + "graph",
|
||||
new XAttribute("id", "g0"),
|
||||
new XAttribute("edgedefault", "directed"));
|
||||
|
||||
foreach (var n in nodes.OrderBy(n => n.Id, StringComparer.Ordinal))
|
||||
{
|
||||
g.Add(new XElement(ns + "node", new XAttribute("id", n.Id)));
|
||||
}
|
||||
|
||||
if (includeEdges)
|
||||
{
|
||||
foreach (var e in edges.OrderBy(e => e.Id, StringComparer.Ordinal))
|
||||
{
|
||||
g.Add(new XElement(ns + "edge",
|
||||
new XAttribute("id", e.Id),
|
||||
new XAttribute("source", e.Source),
|
||||
new XAttribute("target", e.Target)));
|
||||
}
|
||||
}
|
||||
|
||||
var doc = new XDocument(new XElement(ns + "graphml", g));
|
||||
using var ms = new MemoryStream();
|
||||
doc.Save(ms);
|
||||
return ms.ToArray();
|
||||
}
|
||||
|
||||
private static byte[] ExportPlaceholder(string format) =>
|
||||
Encoding.UTF8.GetBytes($"placeholder-{format}-export");
|
||||
|
||||
private static string ComputeSha256(byte[] payload)
|
||||
{
|
||||
using var sha = SHA256.Create();
|
||||
return Convert.ToHexString(sha.ComputeHash(payload)).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
|
||||
internal static class GraphQueryJson
|
||||
{
|
||||
public static readonly System.Text.Json.JsonSerializerOptions Options = new(System.Text.Json.JsonSerializerDefaults.Web)
|
||||
{
|
||||
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,246 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public sealed class InMemoryGraphPathService : IGraphPathService
|
||||
{
|
||||
private readonly InMemoryGraphRepository _repository;
|
||||
private readonly IOverlayService _overlayService;
|
||||
private static readonly JsonSerializerOptions Options = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
public InMemoryGraphPathService(InMemoryGraphRepository repository, IOverlayService overlayService)
|
||||
{
|
||||
_repository = repository;
|
||||
_overlayService = overlayService;
|
||||
}
|
||||
|
||||
public async IAsyncEnumerable<string> FindPathsAsync(string tenant, GraphPathRequest request, [EnumeratorCancellation] CancellationToken ct = default)
|
||||
{
|
||||
var maxDepth = Math.Clamp(request.MaxDepth ?? 3, 1, 6);
|
||||
var budget = (request.Budget?.ApplyDefaults()) ?? GraphQueryBudget.Default.ApplyDefaults();
|
||||
var tileBudgetLimit = Math.Clamp(budget.Tiles ?? 6000, 1, 6000);
|
||||
var nodeBudgetRemaining = budget.Nodes ?? 5000;
|
||||
var edgeBudgetRemaining = budget.Edges ?? 10000;
|
||||
var budgetRemaining = tileBudgetLimit;
|
||||
var seq = 0;
|
||||
|
||||
var result = FindShortestPath(tenant, request, maxDepth);
|
||||
|
||||
if (result is null)
|
||||
{
|
||||
var error = new ErrorResponse
|
||||
{
|
||||
Error = "GRAPH_PATH_NOT_FOUND",
|
||||
Message = "No path found within depth budget.",
|
||||
Details = new { sources = request.Sources, targets = request.Targets, maxDepth }
|
||||
};
|
||||
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("error", seq++, error, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
yield break;
|
||||
}
|
||||
|
||||
var path = result.Value;
|
||||
|
||||
Dictionary<string, Dictionary<string, OverlayPayload>>? overlays = null;
|
||||
if (request.IncludeOverlays && path.Nodes.Count > 0)
|
||||
{
|
||||
overlays = (await _overlayService.GetOverlaysAsync(tenant, path.Nodes.Select(n => n.Id), sampleExplain: true, ct))
|
||||
.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
foreach (var node in path.Nodes)
|
||||
{
|
||||
if (budgetRemaining <= 0 || nodeBudgetRemaining <= 0)
|
||||
{
|
||||
yield return BudgetExceeded(tileBudgetLimit, budgetRemaining, seq++);
|
||||
yield break;
|
||||
}
|
||||
var nodeWithOverlay = node;
|
||||
if (request.IncludeOverlays && overlays is not null && overlays.TryGetValue(node.Id, out var nodeOverlays))
|
||||
{
|
||||
nodeWithOverlay = node with { Overlays = nodeOverlays };
|
||||
}
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("node", seq++, nodeWithOverlay, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
budgetRemaining--;
|
||||
nodeBudgetRemaining--;
|
||||
}
|
||||
|
||||
foreach (var edge in path.Edges)
|
||||
{
|
||||
if (budgetRemaining <= 0 || edgeBudgetRemaining <= 0)
|
||||
{
|
||||
yield return BudgetExceeded(tileBudgetLimit, budgetRemaining, seq++);
|
||||
yield break;
|
||||
}
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("edge", seq++, edge, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
budgetRemaining--;
|
||||
edgeBudgetRemaining--;
|
||||
}
|
||||
|
||||
if (budgetRemaining > 0)
|
||||
{
|
||||
var stats = new StatsTile
|
||||
{
|
||||
Nodes = path.Nodes.Count,
|
||||
Edges = path.Edges.Count
|
||||
};
|
||||
yield return JsonSerializer.Serialize(new TileEnvelope("stats", seq++, stats, Cost(tileBudgetLimit, budgetRemaining)), Options);
|
||||
}
|
||||
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
|
||||
private static string BudgetExceeded(int limit, int remaining, int seq) =>
|
||||
JsonSerializer.Serialize(
|
||||
new TileEnvelope("error", seq, new ErrorResponse
|
||||
{
|
||||
Error = "GRAPH_BUDGET_EXCEEDED",
|
||||
Message = "Path computation exceeded tile budget."
|
||||
}, Cost(limit, remaining)),
|
||||
Options);
|
||||
|
||||
private (IReadOnlyList<NodeTile> Nodes, IReadOnlyList<EdgeTile> Edges)? FindShortestPath(string tenant, GraphPathRequest request, int maxDepth)
|
||||
{
|
||||
var nodes = _repository
|
||||
.Query(tenant, new GraphSearchRequest
|
||||
{
|
||||
Kinds = request.Kinds is { Length: > 0 } ? request.Kinds : _repositoryKindsForTenant(tenant),
|
||||
Filters = request.Filters
|
||||
})
|
||||
.ToDictionary(n => n.Id, StringComparer.Ordinal);
|
||||
|
||||
// ensure sources/targets are present even if filters/kinds excluded
|
||||
foreach (var id in request.Sources.Concat(request.Targets))
|
||||
{
|
||||
if (!nodes.ContainsKey(id))
|
||||
{
|
||||
var match = _repository.Query(tenant, new GraphSearchRequest
|
||||
{
|
||||
Kinds = Array.Empty<string>(),
|
||||
Query = id
|
||||
}).FirstOrDefault(n => n.Id.Equals(id, StringComparison.Ordinal));
|
||||
|
||||
if (match is not null)
|
||||
{
|
||||
nodes[id] = match;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var sources = request.Sources.Where(nodes.ContainsKey).Distinct(StringComparer.Ordinal).ToArray();
|
||||
var targets = request.Targets.ToHashSet(StringComparer.Ordinal);
|
||||
|
||||
if (sources.Length == 0 || targets.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var edges = _repositoryEdges(tenant)
|
||||
.Where(e => nodes.ContainsKey(e.Source) && nodes.ContainsKey(e.Target))
|
||||
.OrderBy(e => e.Id, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
var adjacency = new Dictionary<string, List<EdgeTile>>(StringComparer.Ordinal);
|
||||
foreach (var edge in edges)
|
||||
{
|
||||
if (!adjacency.TryGetValue(edge.Source, out var list))
|
||||
{
|
||||
list = new List<EdgeTile>();
|
||||
adjacency[edge.Source] = list;
|
||||
}
|
||||
list.Add(edge);
|
||||
}
|
||||
|
||||
var queue = new Queue<(string NodeId, List<EdgeTile> PathEdges, string Origin)>();
|
||||
var visited = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
foreach (var source in sources.OrderBy(s => s, StringComparer.Ordinal))
|
||||
{
|
||||
queue.Enqueue((source, new List<EdgeTile>(), source));
|
||||
visited.Add(source);
|
||||
}
|
||||
|
||||
while (queue.Count > 0)
|
||||
{
|
||||
var (current, pathEdges, origin) = queue.Dequeue();
|
||||
if (targets.Contains(current))
|
||||
{
|
||||
var pathNodes = BuildNodeListFromEdges(nodes, origin, current, pathEdges);
|
||||
return (pathNodes, pathEdges);
|
||||
}
|
||||
|
||||
if (pathEdges.Count >= maxDepth)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!adjacency.TryGetValue(current, out var outgoing))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var edge in outgoing)
|
||||
{
|
||||
if (visited.Contains(edge.Target))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var nextEdges = new List<EdgeTile>(pathEdges.Count + 1);
|
||||
nextEdges.AddRange(pathEdges);
|
||||
nextEdges.Add(edge);
|
||||
|
||||
queue.Enqueue((edge.Target, nextEdges, origin));
|
||||
visited.Add(edge.Target);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NodeTile> BuildNodeListFromEdges(IDictionary<string, NodeTile> nodes, string currentSource, string target, List<EdgeTile> edges)
|
||||
{
|
||||
var list = new List<NodeTile>();
|
||||
var firstId = edges.Count > 0 ? edges[0].Source : currentSource;
|
||||
if (nodes.TryGetValue(firstId, out var first))
|
||||
{
|
||||
list.Add(first);
|
||||
}
|
||||
|
||||
foreach (var edge in edges)
|
||||
{
|
||||
if (nodes.TryGetValue(edge.Target, out var node))
|
||||
{
|
||||
list.Add(node);
|
||||
}
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
private IEnumerable<EdgeTile> _repositoryEdges(string tenant) =>
|
||||
_repository
|
||||
.QueryGraph(tenant, new GraphQueryRequest
|
||||
{
|
||||
Kinds = Array.Empty<string>(),
|
||||
IncludeEdges = true,
|
||||
IncludeStats = false,
|
||||
Query = null,
|
||||
Filters = null
|
||||
}).Edges;
|
||||
|
||||
private string[] _repositoryKindsForTenant(string tenant) =>
|
||||
_repository.Query(tenant, new GraphSearchRequest { Kinds = Array.Empty<string>(), Query = null, Filters = null })
|
||||
.Select(n => n.Kind)
|
||||
.Distinct(StringComparer.OrdinalIgnoreCase)
|
||||
.ToArray();
|
||||
|
||||
private static CostBudget Cost(int limit, int remaining) =>
|
||||
new(limit, remaining - 1, limit - (remaining - 1));
|
||||
}
|
||||
@@ -0,0 +1,209 @@
|
||||
using System.Linq;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public sealed class InMemoryGraphQueryService : IGraphQueryService
|
||||
{
|
||||
private readonly InMemoryGraphRepository _repository;
|
||||
private readonly IMemoryCache _cache;
|
||||
private readonly IOverlayService _overlayService;
|
||||
private readonly IGraphMetrics _metrics;
|
||||
private static readonly JsonSerializerOptions Options = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
public InMemoryGraphQueryService(InMemoryGraphRepository repository, IMemoryCache cache, IOverlayService overlayService, IGraphMetrics metrics)
|
||||
{
|
||||
_repository = repository;
|
||||
_cache = cache;
|
||||
_overlayService = overlayService;
|
||||
_metrics = metrics;
|
||||
}
|
||||
|
||||
public async IAsyncEnumerable<string> QueryAsync(string tenant, GraphQueryRequest request, [EnumeratorCancellation] CancellationToken ct = default)
|
||||
{
|
||||
var stopwatch = System.Diagnostics.Stopwatch.StartNew();
|
||||
var limit = Math.Clamp(request.Limit ?? 100, 1, 500);
|
||||
var budget = (request.Budget?.ApplyDefaults()) ?? GraphQueryBudget.Default.ApplyDefaults();
|
||||
var tileBudgetLimit = Math.Clamp(budget.Tiles ?? 6000, 1, 6000);
|
||||
var nodeBudgetLimit = budget.Nodes ?? 5000;
|
||||
var edgeBudgetLimit = budget.Edges ?? 10000;
|
||||
|
||||
var cacheKey = BuildCacheKey(tenant, request, limit, tileBudgetLimit, nodeBudgetLimit, edgeBudgetLimit);
|
||||
|
||||
if (_cache.TryGetValue(cacheKey, out string[]? cached))
|
||||
{
|
||||
foreach (var line in cached)
|
||||
{
|
||||
yield return line;
|
||||
}
|
||||
yield break;
|
||||
}
|
||||
|
||||
var cursorOffset = CursorCodec.Decode(request.Cursor);
|
||||
var (nodes, edges) = _repository.QueryGraph(tenant, request);
|
||||
|
||||
if (request.IncludeEdges && edges.Count > edgeBudgetLimit)
|
||||
{
|
||||
_metrics.BudgetDenied.Add(1, new KeyValuePair<string, object?>("reason", "edges"));
|
||||
var error = new ErrorResponse
|
||||
{
|
||||
Error = "GRAPH_BUDGET_EXCEEDED",
|
||||
Message = $"Query exceeded edge budget (edges>{edgeBudgetLimit}).",
|
||||
Details = new { nodes = nodes.Count, edges = edges.Count, budget }
|
||||
};
|
||||
var errorLine = JsonSerializer.Serialize(new TileEnvelope("error", 0, error), Options);
|
||||
yield return errorLine;
|
||||
_cache.Set(cacheKey, new[] { errorLine }, new MemoryCacheEntryOptions
|
||||
{
|
||||
AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(2)
|
||||
});
|
||||
yield break;
|
||||
}
|
||||
|
||||
var scored = nodes
|
||||
.Select(n => (Node: n, Score: Score(n, request)))
|
||||
.OrderByDescending(t => t.Score)
|
||||
.ThenBy(t => t.Node.Id, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var page = scored.Skip(cursorOffset).Take(limit).ToArray();
|
||||
var remainingNodes = Math.Max(0, scored.Length - cursorOffset - page.Length);
|
||||
var hasMore = remainingNodes > 0;
|
||||
|
||||
var seq = 0;
|
||||
var lines = new List<string>();
|
||||
var budgetRemaining = tileBudgetLimit;
|
||||
|
||||
Dictionary<string, Dictionary<string, OverlayPayload>>? overlays = null;
|
||||
if (request.IncludeOverlays && page.Length > 0)
|
||||
{
|
||||
overlays = (await _overlayService.GetOverlaysAsync(tenant, page.Select(p => p.Node.Id), sampleExplain: true, ct))
|
||||
.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
foreach (var item in page)
|
||||
{
|
||||
if (hasMore && budgetRemaining == 1)
|
||||
{
|
||||
break; // reserve one tile for cursor
|
||||
}
|
||||
|
||||
if (budgetRemaining <= 0 || nodeBudgetLimit <= 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
var nodeToEmit = item.Node;
|
||||
if (request.IncludeOverlays && overlays is not null && overlays.TryGetValue(item.Node.Id, out var nodeOverlays))
|
||||
{
|
||||
nodeToEmit = item.Node with { Overlays = nodeOverlays };
|
||||
}
|
||||
|
||||
lines.Add(JsonSerializer.Serialize(new TileEnvelope("node", seq++, nodeToEmit, Cost(tileBudgetLimit, budgetRemaining)), Options));
|
||||
budgetRemaining--;
|
||||
nodeBudgetLimit--;
|
||||
}
|
||||
|
||||
if (request.IncludeEdges)
|
||||
{
|
||||
foreach (var edge in edges)
|
||||
{
|
||||
// Reserve cursor only if we actually have more nodes beyond current page
|
||||
if (hasMore && budgetRemaining == 1) break;
|
||||
if (budgetRemaining <= 0 || edgeBudgetLimit <= 0) break;
|
||||
lines.Add(JsonSerializer.Serialize(new TileEnvelope("edge", seq++, edge, Cost(tileBudgetLimit, budgetRemaining)), Options));
|
||||
budgetRemaining--;
|
||||
edgeBudgetLimit--;
|
||||
}
|
||||
}
|
||||
|
||||
if (request.IncludeStats && budgetRemaining > (hasMore ? 1 : 0))
|
||||
{
|
||||
var stats = new StatsTile
|
||||
{
|
||||
Nodes = nodes.Count,
|
||||
Edges = edges.Count
|
||||
};
|
||||
lines.Add(JsonSerializer.Serialize(new TileEnvelope("stats", seq++, stats, Cost(tileBudgetLimit, budgetRemaining)), Options));
|
||||
budgetRemaining--;
|
||||
}
|
||||
|
||||
if (hasMore && budgetRemaining > 0)
|
||||
{
|
||||
var nextCursor = CursorCodec.Encode(cursorOffset + page.Length);
|
||||
lines.Add(JsonSerializer.Serialize(new TileEnvelope("cursor", seq++, new CursorTile(nextCursor, $"https://gateway.local/api/graph/query?cursor={nextCursor}"), Cost(tileBudgetLimit, budgetRemaining)), Options));
|
||||
}
|
||||
|
||||
_cache.Set(cacheKey, lines.ToArray(), new MemoryCacheEntryOptions
|
||||
{
|
||||
AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(2)
|
||||
});
|
||||
|
||||
stopwatch.Stop();
|
||||
_metrics.QueryLatencySeconds.Record(stopwatch.Elapsed.TotalSeconds, new KeyValuePair<string, object?>("route", "/graph/query"));
|
||||
|
||||
foreach (var line in lines)
|
||||
{
|
||||
yield return line;
|
||||
}
|
||||
}
|
||||
|
||||
private static string BuildCacheKey(string tenant, GraphQueryRequest request, int limit, int tileBudget, int nodeBudget, int edgeBudget)
|
||||
{
|
||||
var filters = request.Filters is null
|
||||
? string.Empty
|
||||
: string.Join(";", request.Filters.OrderBy(k => k.Key, StringComparer.OrdinalIgnoreCase)
|
||||
.Select(kvp => $"{kvp.Key}={kvp.Value}"));
|
||||
|
||||
var kinds = request.Kinds is null ? string.Empty : string.Join(",", request.Kinds.OrderBy(k => k, StringComparer.OrdinalIgnoreCase));
|
||||
var budget = request.Budget is null ? "budget:none" : $"tiles:{request.Budget.Tiles};nodes:{request.Budget.Nodes};edges:{request.Budget.Edges}";
|
||||
return $"{tenant}|{kinds}|{request.Query}|{limit}|{request.Cursor}|{filters}|edges:{request.IncludeEdges}|stats:{request.IncludeStats}|{budget}|tb:{tileBudget}|nb:{nodeBudget}|eb:{edgeBudget}";
|
||||
}
|
||||
|
||||
private static int Score(NodeTile node, GraphQueryRequest request)
|
||||
{
|
||||
var score = 0;
|
||||
if (!string.IsNullOrWhiteSpace(request.Query))
|
||||
{
|
||||
var query = request.Query!;
|
||||
score += MatchScore(node.Id, query, exact: 100, prefix: 80, contains: 50);
|
||||
foreach (var value in node.Attributes.Values.OfType<string>())
|
||||
{
|
||||
score += MatchScore(value, query, exact: 70, prefix: 40, contains: 25);
|
||||
}
|
||||
}
|
||||
|
||||
if (request.Filters is not null)
|
||||
{
|
||||
foreach (var filter in request.Filters)
|
||||
{
|
||||
if (node.Attributes.TryGetValue(filter.Key, out var value) && value is not null && filter.Value is not null)
|
||||
{
|
||||
if (value.ToString()!.Equals(filter.Value.ToString(), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
score += 5;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return score;
|
||||
}
|
||||
|
||||
private static int MatchScore(string candidate, string query, int exact, int prefix, int contains)
|
||||
{
|
||||
if (candidate.Equals(query, StringComparison.OrdinalIgnoreCase)) return exact;
|
||||
if (candidate.StartsWith(query, StringComparison.OrdinalIgnoreCase)) return prefix;
|
||||
return candidate.Contains(query, StringComparison.OrdinalIgnoreCase) ? contains : 0;
|
||||
}
|
||||
|
||||
private static CostBudget Cost(int limit, int remainingBudget) =>
|
||||
new(limit, remainingBudget - 1, limit - (remainingBudget - 1));
|
||||
}
|
||||
@@ -5,10 +5,12 @@ namespace StellaOps.Graph.Api.Services;
|
||||
public sealed class InMemoryGraphRepository
|
||||
{
|
||||
private readonly List<NodeTile> _nodes;
|
||||
private readonly List<EdgeTile> _edges;
|
||||
private readonly Dictionary<string, (List<NodeTile> Nodes, List<EdgeTile> Edges)> _snapshots;
|
||||
|
||||
public InMemoryGraphRepository()
|
||||
public InMemoryGraphRepository(IEnumerable<NodeTile>? seed = null, IEnumerable<EdgeTile>? edges = null)
|
||||
{
|
||||
_nodes = new List<NodeTile>
|
||||
_nodes = seed?.ToList() ?? new List<NodeTile>
|
||||
{
|
||||
new() { Id = "gn:acme:component:example", Kind = "component", Tenant = "acme", Attributes = new() { ["purl"] = "pkg:npm/example@1.0.0", ["ecosystem"] = "npm" } },
|
||||
new() { Id = "gn:acme:component:widget", Kind = "component", Tenant = "acme", Attributes = new() { ["purl"] = "pkg:npm/widget@2.0.0", ["ecosystem"] = "npm" } },
|
||||
@@ -17,16 +19,26 @@ public sealed class InMemoryGraphRepository
|
||||
new() { Id = "gn:bravo:component:widget", Kind = "component", Tenant = "bravo",Attributes = new() { ["purl"] = "pkg:npm/widget@2.0.0", ["ecosystem"] = "npm" } },
|
||||
new() { Id = "gn:bravo:artifact:sha256:def", Kind = "artifact", Tenant = "bravo",Attributes = new() { ["digest"] = "sha256:def", ["ecosystem"] = "container" } },
|
||||
};
|
||||
|
||||
_edges = edges?.ToList() ?? new List<EdgeTile>
|
||||
{
|
||||
new() { Id = "ge:acme:artifact->component", Kind = "builds", Tenant = "acme", Source = "gn:acme:artifact:sha256:abc", Target = "gn:acme:component:example", Attributes = new() { ["reason"] = "sbom" } },
|
||||
new() { Id = "ge:acme:component->component", Kind = "depends_on", Tenant = "acme", Source = "gn:acme:component:example", Target = "gn:acme:component:widget", Attributes = new() { ["scope"] = "runtime" } },
|
||||
new() { Id = "ge:bravo:artifact->component", Kind = "builds", Tenant = "bravo", Source = "gn:bravo:artifact:sha256:def", Target = "gn:bravo:component:widget", Attributes = new() { ["reason"] = "sbom" } },
|
||||
};
|
||||
|
||||
// Drop edges whose endpoints aren't present in the current node set to avoid invalid graph seeds in tests.
|
||||
var nodeIds = _nodes.Select(n => n.Id).ToHashSet(StringComparer.Ordinal);
|
||||
_edges = _edges.Where(e => nodeIds.Contains(e.Source) && nodeIds.Contains(e.Target)).ToList();
|
||||
|
||||
_snapshots = SeedSnapshots();
|
||||
}
|
||||
|
||||
public IEnumerable<NodeTile> Query(string tenant, GraphSearchRequest request)
|
||||
{
|
||||
var limit = Math.Clamp(request.Limit ?? 50, 1, 500);
|
||||
var cursorOffset = CursorCodec.Decode(request.Cursor);
|
||||
|
||||
var queryable = _nodes
|
||||
.Where(n => n.Tenant.Equals(tenant, StringComparison.Ordinal))
|
||||
.Where(n => request.Kinds.Contains(n.Kind, StringComparer.OrdinalIgnoreCase));
|
||||
.Where(n => request.Kinds is null || request.Kinds.Length == 0 || request.Kinds.Contains(n.Kind, StringComparer.OrdinalIgnoreCase));
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(request.Query))
|
||||
{
|
||||
@@ -38,13 +50,82 @@ public sealed class InMemoryGraphRepository
|
||||
queryable = queryable.Where(n => FiltersMatch(n, request.Filters!));
|
||||
}
|
||||
|
||||
queryable = request.Ordering switch
|
||||
return queryable;
|
||||
}
|
||||
|
||||
public (IReadOnlyList<NodeTile> Nodes, IReadOnlyList<EdgeTile> Edges) QueryGraph(string tenant, GraphQueryRequest request)
|
||||
{
|
||||
var nodes = Query(tenant, new GraphSearchRequest
|
||||
{
|
||||
"id" => queryable.OrderBy(n => n.Id, StringComparer.Ordinal),
|
||||
_ => queryable.OrderBy(n => n.Id.Length).ThenBy(n => n.Id, StringComparer.Ordinal)
|
||||
Kinds = request.Kinds,
|
||||
Query = request.Query,
|
||||
Filters = request.Filters,
|
||||
Limit = request.Limit,
|
||||
Cursor = request.Cursor
|
||||
}).ToList();
|
||||
|
||||
var nodeIds = nodes.Select(n => n.Id).ToHashSet(StringComparer.Ordinal);
|
||||
var edges = request.IncludeEdges
|
||||
? _edges.Where(e => e.Tenant.Equals(tenant, StringComparison.Ordinal) && nodeIds.Contains(e.Source) && nodeIds.Contains(e.Target))
|
||||
.OrderBy(e => e.Id, StringComparer.Ordinal)
|
||||
.ToList()
|
||||
: new List<EdgeTile>();
|
||||
|
||||
return (nodes, edges);
|
||||
}
|
||||
|
||||
public (IReadOnlyList<NodeTile> Nodes, IReadOnlyList<EdgeTile> Edges)? GetSnapshot(string tenant, string snapshotId)
|
||||
{
|
||||
if (_snapshots.TryGetValue($"{tenant}:{snapshotId}", out var snap))
|
||||
{
|
||||
return (snap.Nodes, snap.Edges);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private Dictionary<string, (List<NodeTile> Nodes, List<EdgeTile> Edges)> SeedSnapshots()
|
||||
{
|
||||
var dict = new Dictionary<string, (List<NodeTile>, List<EdgeTile>)>(StringComparer.Ordinal);
|
||||
|
||||
dict["acme:snapA"] = (new List<NodeTile>(_nodes), new List<EdgeTile>(_edges));
|
||||
|
||||
var updatedNodes = new List<NodeTile>(_nodes.Select(n => n with
|
||||
{
|
||||
Attributes = new Dictionary<string, object?>(n.Attributes)
|
||||
}));
|
||||
|
||||
var widget = updatedNodes.FirstOrDefault(n => n.Id == "gn:acme:component:widget");
|
||||
if (widget is null)
|
||||
{
|
||||
// Custom seeds may not include the default widget node; skip optional snapshot wiring in that case.
|
||||
return dict;
|
||||
}
|
||||
|
||||
widget.Attributes["purl"] = "pkg:npm/widget@2.1.0";
|
||||
|
||||
updatedNodes.Add(new NodeTile
|
||||
{
|
||||
Id = "gn:acme:component:newlib",
|
||||
Kind = "component",
|
||||
Tenant = "acme",
|
||||
Attributes = new() { ["purl"] = "pkg:npm/newlib@1.0.0", ["ecosystem"] = "npm" }
|
||||
});
|
||||
|
||||
var updatedEdges = new List<EdgeTile>(_edges)
|
||||
{
|
||||
new()
|
||||
{
|
||||
Id = "ge:acme:component->component:new",
|
||||
Kind = "depends_on",
|
||||
Tenant = "acme",
|
||||
Source = widget.Id,
|
||||
Target = "gn:acme:component:newlib",
|
||||
Attributes = new() { ["scope"] = "runtime" }
|
||||
}
|
||||
};
|
||||
|
||||
return queryable.Skip(cursorOffset).Take(limit + 1).ToArray();
|
||||
dict["acme:snapB"] = (updatedNodes, updatedEdges);
|
||||
return dict;
|
||||
}
|
||||
|
||||
private static bool MatchesQuery(NodeTile node, string query)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
@@ -8,39 +9,128 @@ namespace StellaOps.Graph.Api.Services;
|
||||
public sealed class InMemoryGraphSearchService : IGraphSearchService
|
||||
{
|
||||
private readonly InMemoryGraphRepository _repository;
|
||||
private readonly IMemoryCache _cache;
|
||||
private static readonly JsonSerializerOptions Options = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
public InMemoryGraphSearchService(InMemoryGraphRepository repository)
|
||||
public InMemoryGraphSearchService(InMemoryGraphRepository repository, IMemoryCache cache)
|
||||
{
|
||||
_repository = repository;
|
||||
_cache = cache;
|
||||
}
|
||||
|
||||
public async IAsyncEnumerable<string> SearchAsync(string tenant, GraphSearchRequest request, [EnumeratorCancellation] CancellationToken ct = default)
|
||||
{
|
||||
var limit = Math.Clamp(request.Limit ?? 50, 1, 500);
|
||||
var results = _repository.Query(tenant, request).ToArray();
|
||||
var cacheKey = BuildCacheKey(tenant, request, limit);
|
||||
if (_cache.TryGetValue(cacheKey, out string[]? cachedLines))
|
||||
{
|
||||
foreach (var cached in cachedLines)
|
||||
{
|
||||
yield return cached;
|
||||
}
|
||||
yield break;
|
||||
}
|
||||
|
||||
var items = results.Take(limit).ToArray();
|
||||
var remaining = results.Length > limit ? results.Length - limit : 0;
|
||||
var cost = new CostBudget(limit, Math.Max(0, limit - items.Length), items.Length);
|
||||
var cursorOffset = CursorCodec.Decode(request.Cursor);
|
||||
var results = _repository.Query(tenant, request).ToArray();
|
||||
var total = results.Length;
|
||||
|
||||
var scored = results
|
||||
.Select(n => (Node: n, Score: Score(n, request)))
|
||||
.OrderByDescending(t => t.Score)
|
||||
.ThenBy(t => t.Node.Id, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var ordered = request.Ordering switch
|
||||
{
|
||||
"id" => scored.OrderBy(t => t.Node.Id, StringComparer.Ordinal).ToArray(),
|
||||
_ => scored
|
||||
};
|
||||
|
||||
var page = ordered.Skip(cursorOffset).Take(limit).ToArray();
|
||||
var remaining = Math.Max(0, total - cursorOffset - page.Length);
|
||||
var hasMore = total > cursorOffset + page.Length || total > limit;
|
||||
if (!hasMore && remaining <= 0 && total > limit)
|
||||
{
|
||||
hasMore = true;
|
||||
remaining = Math.Max(1, total - limit);
|
||||
}
|
||||
var cost = new CostBudget(limit, remaining, page.Length);
|
||||
|
||||
var seq = 0;
|
||||
foreach (var item in items)
|
||||
var lines = new List<string>();
|
||||
foreach (var item in page)
|
||||
{
|
||||
var envelope = new TileEnvelope("node", seq++, item, cost);
|
||||
yield return JsonSerializer.Serialize(envelope, Options);
|
||||
var envelope = new TileEnvelope("node", seq++, item.Node, cost);
|
||||
lines.Add(JsonSerializer.Serialize(envelope, Options));
|
||||
}
|
||||
|
||||
if (remaining > 0)
|
||||
if (hasMore)
|
||||
{
|
||||
var nextCursor = CursorCodec.Encode(CursorCodec.Decode(request.Cursor) + items.Length);
|
||||
var nextCursor = CursorCodec.Encode(cursorOffset + page.Length);
|
||||
var cursorTile = new TileEnvelope("cursor", seq++, new CursorTile(nextCursor, $"https://gateway.local/api/graph/search?cursor={nextCursor}"));
|
||||
yield return JsonSerializer.Serialize(cursorTile, Options);
|
||||
lines.Add(JsonSerializer.Serialize(cursorTile, Options));
|
||||
}
|
||||
|
||||
await Task.CompletedTask;
|
||||
_cache.Set(cacheKey, lines.ToArray(), new MemoryCacheEntryOptions
|
||||
{
|
||||
AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(2)
|
||||
});
|
||||
|
||||
foreach (var line in lines)
|
||||
{
|
||||
yield return line;
|
||||
}
|
||||
}
|
||||
|
||||
private static string BuildCacheKey(string tenant, GraphSearchRequest request, int limit)
|
||||
{
|
||||
var filters = request.Filters is null
|
||||
? string.Empty
|
||||
: string.Join(";", request.Filters.OrderBy(k => k.Key, StringComparer.OrdinalIgnoreCase)
|
||||
.Select(kvp => $"{kvp.Key}={kvp.Value}"));
|
||||
|
||||
var kinds = request.Kinds is null ? string.Empty : string.Join(",", request.Kinds.OrderBy(k => k, StringComparer.OrdinalIgnoreCase));
|
||||
return $"{tenant}|{kinds}|{request.Query}|{limit}|{request.Ordering}|{request.Cursor}|{filters}";
|
||||
}
|
||||
|
||||
private static int Score(NodeTile node, GraphSearchRequest request)
|
||||
{
|
||||
var score = 0;
|
||||
if (!string.IsNullOrWhiteSpace(request.Query))
|
||||
{
|
||||
var query = request.Query!;
|
||||
score += MatchScore(node.Id, query, exact: 100, prefix: 80, contains: 50);
|
||||
foreach (var value in node.Attributes.Values.OfType<string>())
|
||||
{
|
||||
score += MatchScore(value, query, exact: 70, prefix: 40, contains: 25);
|
||||
}
|
||||
}
|
||||
|
||||
if (request.Filters is not null)
|
||||
{
|
||||
foreach (var filter in request.Filters)
|
||||
{
|
||||
if (node.Attributes.TryGetValue(filter.Key, out var value) && value is not null && filter.Value is not null)
|
||||
{
|
||||
if (value.ToString()!.Equals(filter.Value.ToString(), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
score += 5;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return score;
|
||||
}
|
||||
|
||||
private static int MatchScore(string candidate, string query, int exact, int prefix, int contains)
|
||||
{
|
||||
if (candidate.Equals(query, StringComparison.OrdinalIgnoreCase)) return exact;
|
||||
if (candidate.StartsWith(query, StringComparison.OrdinalIgnoreCase)) return prefix;
|
||||
return candidate.Contains(query, StringComparison.OrdinalIgnoreCase) ? contains : 0;
|
||||
}
|
||||
}
|
||||
|
||||
115
src/Graph/StellaOps.Graph.Api/Services/InMemoryOverlayService.cs
Normal file
115
src/Graph/StellaOps.Graph.Api/Services/InMemoryOverlayService.cs
Normal file
@@ -0,0 +1,115 @@
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
public sealed class InMemoryOverlayService : IOverlayService
|
||||
{
|
||||
private readonly IMemoryCache _cache;
|
||||
private static readonly DateTimeOffset FixedTimestamp = new(2025, 11, 23, 0, 0, 0, TimeSpan.Zero);
|
||||
private readonly IGraphMetrics _metrics;
|
||||
|
||||
public InMemoryOverlayService(IMemoryCache cache, IGraphMetrics metrics)
|
||||
{
|
||||
_cache = cache;
|
||||
_metrics = metrics;
|
||||
}
|
||||
|
||||
public Task<IDictionary<string, Dictionary<string, OverlayPayload>>> GetOverlaysAsync(string tenant, IEnumerable<string> nodeIds, bool sampleExplain, CancellationToken ct = default)
|
||||
{
|
||||
var result = new Dictionary<string, Dictionary<string, OverlayPayload>>(StringComparer.Ordinal);
|
||||
var explainEmitted = false;
|
||||
|
||||
foreach (var nodeId in nodeIds)
|
||||
{
|
||||
var cacheKey = $"overlay:{tenant}:{nodeId}";
|
||||
if (!_cache.TryGetValue(cacheKey, out Dictionary<string, OverlayPayload>? cachedBase))
|
||||
{
|
||||
_metrics.OverlayCacheMiss.Add(1);
|
||||
cachedBase = new Dictionary<string, OverlayPayload>(StringComparer.Ordinal)
|
||||
{
|
||||
["policy"] = BuildPolicyOverlay(tenant, nodeId, includeExplain: false),
|
||||
["vex"] = BuildVexOverlay(tenant, nodeId)
|
||||
};
|
||||
|
||||
_cache.Set(cacheKey, cachedBase, new MemoryCacheEntryOptions
|
||||
{
|
||||
AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(10)
|
||||
});
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
_metrics.OverlayCacheHit.Add(1);
|
||||
}
|
||||
|
||||
// Always return a fresh copy so we can inject a single explain trace without polluting cache.
|
||||
var overlays = new Dictionary<string, OverlayPayload>(cachedBase, StringComparer.Ordinal);
|
||||
|
||||
if (sampleExplain && !explainEmitted)
|
||||
{
|
||||
overlays["policy"] = BuildPolicyOverlay(tenant, nodeId, includeExplain: true);
|
||||
explainEmitted = true;
|
||||
}
|
||||
|
||||
result[nodeId] = overlays;
|
||||
}
|
||||
|
||||
return Task.FromResult<IDictionary<string, Dictionary<string, OverlayPayload>>>(result);
|
||||
}
|
||||
|
||||
private static OverlayPayload BuildPolicyOverlay(string tenant, string nodeId, bool includeExplain)
|
||||
{
|
||||
var overlayId = ComputeOverlayId(tenant, nodeId, "policy");
|
||||
return new OverlayPayload(
|
||||
Kind: "policy",
|
||||
Version: "policy.overlay.v1",
|
||||
Data: new
|
||||
{
|
||||
overlayId,
|
||||
subject = nodeId,
|
||||
decision = "warn",
|
||||
rationale = new[] { "policy-default", "missing VEX waiver" },
|
||||
inputs = new
|
||||
{
|
||||
sbomDigest = "sha256:demo-sbom",
|
||||
policyVersion = "2025.11.23",
|
||||
advisoriesDigest = "sha256:demo-advisories"
|
||||
},
|
||||
policyVersion = "2025.11.23",
|
||||
createdAt = FixedTimestamp,
|
||||
explainTrace = includeExplain
|
||||
? new[]
|
||||
{
|
||||
"matched rule POLICY-ENGINE-30-001",
|
||||
$"node {nodeId} lacks VEX waiver"
|
||||
}
|
||||
: null
|
||||
});
|
||||
}
|
||||
|
||||
private static OverlayPayload BuildVexOverlay(string tenant, string nodeId)
|
||||
{
|
||||
var overlayId = ComputeOverlayId(tenant, nodeId, "vex");
|
||||
return new OverlayPayload(
|
||||
Kind: "vex",
|
||||
Version: "openvex.v1",
|
||||
Data: new
|
||||
{
|
||||
overlayId,
|
||||
subject = nodeId,
|
||||
status = "not_affected",
|
||||
justification = "component_not_present",
|
||||
issued = FixedTimestamp,
|
||||
impacts = Array.Empty<string>()
|
||||
});
|
||||
}
|
||||
|
||||
private static string ComputeOverlayId(string tenant, string nodeId, string overlayKind)
|
||||
{
|
||||
using var sha = System.Security.Cryptography.SHA256.Create();
|
||||
var bytes = System.Text.Encoding.UTF8.GetBytes($"{tenant}|{nodeId}|{overlayKind}");
|
||||
var hash = sha.ComputeHash(bytes);
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
59
src/Graph/StellaOps.Graph.Api/Services/RateLimiterService.cs
Normal file
59
src/Graph/StellaOps.Graph.Api/Services/RateLimiterService.cs
Normal file
@@ -0,0 +1,59 @@
|
||||
namespace StellaOps.Graph.Api.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Simple fixed-window rate limiter keyed by tenant + route. Designed for in-memory demo usage.
|
||||
/// </summary>
|
||||
public interface IRateLimiter
|
||||
{
|
||||
bool Allow(string tenant, string route);
|
||||
}
|
||||
|
||||
internal interface IClock
|
||||
{
|
||||
DateTimeOffset UtcNow { get; }
|
||||
}
|
||||
|
||||
internal sealed class SystemClock : IClock
|
||||
{
|
||||
public DateTimeOffset UtcNow => DateTimeOffset.UtcNow;
|
||||
}
|
||||
|
||||
public sealed class RateLimiterService : IRateLimiter
|
||||
{
|
||||
private readonly TimeSpan _window;
|
||||
private readonly int _limit;
|
||||
private readonly IClock _clock;
|
||||
private readonly Dictionary<string, (DateTimeOffset WindowStart, int Count)> _state = new(StringComparer.Ordinal);
|
||||
private readonly object _lock = new();
|
||||
|
||||
public RateLimiterService(int limitPerWindow = 120, TimeSpan? window = null, IClock? clock = null)
|
||||
{
|
||||
_limit = limitPerWindow;
|
||||
_window = window ?? TimeSpan.FromMinutes(1);
|
||||
_clock = clock ?? new SystemClock();
|
||||
}
|
||||
|
||||
public bool Allow(string tenant, string route)
|
||||
{
|
||||
var key = $"{tenant}:{route}";
|
||||
var now = _clock.UtcNow;
|
||||
lock (_lock)
|
||||
{
|
||||
if (_state.TryGetValue(key, out var entry))
|
||||
{
|
||||
if (now - entry.WindowStart < _window)
|
||||
{
|
||||
if (entry.Count >= _limit)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
_state[key] = (entry.WindowStart, entry.Count + 1);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
_state[key] = (now, 1);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,5 +5,7 @@
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<GenerateDocumentationFile>true</GenerateDocumentationFile>
|
||||
<NoWarn>1591</NoWarn>
|
||||
<!-- Speed up local test builds by skipping static web assets discovery -->
|
||||
<DisableStaticWebAssets>true</DisableStaticWebAssets>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
using System.Linq;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
public class AuditLoggerTests
|
||||
{
|
||||
[Fact]
|
||||
public void LogsAndCapsSize()
|
||||
{
|
||||
var logger = new InMemoryAuditLogger();
|
||||
for (var i = 0; i < 510; i++)
|
||||
{
|
||||
logger.Log(new AuditEvent(
|
||||
Timestamp: DateTimeOffset.UnixEpoch.AddMinutes(i),
|
||||
Tenant: "t",
|
||||
Route: "/r",
|
||||
Method: "POST",
|
||||
Actor: "auth",
|
||||
Scopes: new[] { "graph:query" },
|
||||
StatusCode: 200,
|
||||
DurationMs: 5));
|
||||
}
|
||||
|
||||
var recent = logger.GetRecent();
|
||||
Assert.True(recent.Count <= 100);
|
||||
Assert.Equal(509, recent.First().Timestamp.Minute);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
public class DiffServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task DiffAsync_EmitsAddedRemovedChangedAndStats()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var service = new InMemoryGraphDiffService(repo);
|
||||
|
||||
var request = new GraphDiffRequest
|
||||
{
|
||||
SnapshotA = "snapA",
|
||||
SnapshotB = "snapB",
|
||||
IncludeEdges = true,
|
||||
IncludeStats = true
|
||||
};
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.DiffAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"node_added\"") && l.Contains("newlib"));
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"node_changed\"") && l.Contains("widget"));
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"edge_added\""));
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"stats\""));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DiffAsync_WhenSnapshotMissing_ReturnsError()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var service = new InMemoryGraphDiffService(repo);
|
||||
|
||||
var request = new GraphDiffRequest
|
||||
{
|
||||
SnapshotA = "snapA",
|
||||
SnapshotB = "missing"
|
||||
};
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.DiffAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
Assert.Single(lines);
|
||||
Assert.Contains("GRAPH_SNAPSHOT_NOT_FOUND", lines[0]);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,58 @@
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
public class ExportServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task Export_ReturnsManifestAndDownloadablePayload()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var metrics = new GraphMetrics();
|
||||
var export = new InMemoryGraphExportService(repo, metrics);
|
||||
var req = new GraphExportRequest { Format = "ndjson", IncludeEdges = true };
|
||||
|
||||
var job = await export.StartExportAsync("acme", req);
|
||||
|
||||
Assert.NotNull(job);
|
||||
Assert.Equal("ndjson", job.Format, ignoreCase: true);
|
||||
Assert.True(job.Payload.Length > 0);
|
||||
Assert.False(string.IsNullOrWhiteSpace(job.Sha256));
|
||||
|
||||
var fetched = export.Get(job.JobId);
|
||||
Assert.NotNull(fetched);
|
||||
Assert.Equal(job.Sha256, fetched!.Sha256);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Export_IncludesEdgesWhenRequested()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var metrics = new GraphMetrics();
|
||||
var export = new InMemoryGraphExportService(repo, metrics);
|
||||
var req = new GraphExportRequest { Format = "ndjson", IncludeEdges = true };
|
||||
|
||||
var job = await export.StartExportAsync("acme", req);
|
||||
var text = System.Text.Encoding.UTF8.GetString(job.Payload);
|
||||
Assert.Contains("\"type\":\"edge\"", text);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Export_RespectsSnapshotSelection()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var metrics = new GraphMetrics();
|
||||
var export = new InMemoryGraphExportService(repo, metrics);
|
||||
var req = new GraphExportRequest { Format = "ndjson", IncludeEdges = false, SnapshotId = "snapB" };
|
||||
|
||||
var job = await export.StartExportAsync("acme", req);
|
||||
var lines = System.Text.Encoding.UTF8.GetString(job.Payload)
|
||||
.Split('\n', StringSplitOptions.RemoveEmptyEntries);
|
||||
|
||||
Assert.Contains(lines, l => l.Contains("newlib"));
|
||||
}
|
||||
}
|
||||
114
src/Graph/__Tests/StellaOps.Graph.Api.Tests/LoadTests.cs
Normal file
114
src/Graph/__Tests/StellaOps.Graph.Api.Tests/LoadTests.cs
Normal file
@@ -0,0 +1,114 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
public class LoadTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task DeterministicOrdering_WithSyntheticGraph_RemainsStable()
|
||||
{
|
||||
var builder = new SyntheticGraphBuilder(seed: 42, nodeCount: 1000, edgeCount: 2000);
|
||||
var repo = builder.BuildRepository();
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var metrics = new GraphMetrics();
|
||||
var overlays = new InMemoryOverlayService(cache, metrics);
|
||||
var service = new InMemoryGraphQueryService(repo, cache, overlays, metrics);
|
||||
|
||||
var request = new GraphQueryRequest
|
||||
{
|
||||
Kinds = new[] { "component" },
|
||||
Query = "pkg:",
|
||||
IncludeEdges = true,
|
||||
Limit = 200
|
||||
};
|
||||
|
||||
var linesRun1 = await CollectLines(service, request);
|
||||
var linesRun2 = await CollectLines(service, request);
|
||||
|
||||
Assert.Equal(linesRun1.Count, linesRun2.Count);
|
||||
Assert.Equal(linesRun1, linesRun2); // strict deterministic ordering
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void QueryValidator_FuzzesInvalidInputs()
|
||||
{
|
||||
var rand = new Random(123);
|
||||
for (var i = 0; i < 50; i++)
|
||||
{
|
||||
var req = new GraphQueryRequest
|
||||
{
|
||||
Kinds = Array.Empty<string>(),
|
||||
Limit = rand.Next(-10, 0),
|
||||
Budget = new GraphQueryBudget { Tiles = rand.Next(-50, 0), Nodes = rand.Next(-5, 0), Edges = rand.Next(-5, 0) }
|
||||
};
|
||||
|
||||
var error = QueryValidator.Validate(req);
|
||||
Assert.NotNull(error);
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<List<string>> CollectLines(InMemoryGraphQueryService service, GraphQueryRequest request)
|
||||
{
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.QueryAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
return lines;
|
||||
}
|
||||
}
|
||||
|
||||
internal sealed class SyntheticGraphBuilder
|
||||
{
|
||||
private readonly int _nodeCount;
|
||||
private readonly int _edgeCount;
|
||||
private readonly Random _rand;
|
||||
|
||||
public SyntheticGraphBuilder(int seed, int nodeCount, int edgeCount)
|
||||
{
|
||||
_nodeCount = nodeCount;
|
||||
_edgeCount = edgeCount;
|
||||
_rand = new Random(seed);
|
||||
}
|
||||
|
||||
public InMemoryGraphRepository BuildRepository()
|
||||
{
|
||||
var nodes = Enumerable.Range(0, _nodeCount)
|
||||
.Select(i => new NodeTile
|
||||
{
|
||||
Id = $"gn:acme:component:{i:D5}",
|
||||
Kind = "component",
|
||||
Tenant = "acme",
|
||||
Attributes = new()
|
||||
{
|
||||
["purl"] = $"pkg:npm/example{i}@1.0.0",
|
||||
["ecosystem"] = "npm"
|
||||
}
|
||||
})
|
||||
.ToList();
|
||||
|
||||
var edges = new List<EdgeTile>();
|
||||
for (var i = 0; i < _edgeCount; i++)
|
||||
{
|
||||
var source = _rand.Next(0, _nodeCount);
|
||||
var target = _rand.Next(0, _nodeCount);
|
||||
if (source == target) target = (target + 1) % _nodeCount;
|
||||
edges.Add(new EdgeTile
|
||||
{
|
||||
Id = $"ge:acme:{i:D6}",
|
||||
Kind = "depends_on",
|
||||
Tenant = "acme",
|
||||
Source = nodes[source].Id,
|
||||
Target = nodes[target].Id
|
||||
});
|
||||
}
|
||||
|
||||
return new InMemoryGraphRepository(nodes, edges);
|
||||
}
|
||||
}
|
||||
92
src/Graph/__Tests/StellaOps.Graph.Api.Tests/MetricsTests.cs
Normal file
92
src/Graph/__Tests/StellaOps.Graph.Api.Tests/MetricsTests.cs
Normal file
@@ -0,0 +1,92 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics.Metrics;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
public class MetricsTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task BudgetDeniedCounter_IncrementsOnEdgeBudgetExceeded()
|
||||
{
|
||||
using var metrics = new GraphMetrics();
|
||||
using var listener = new MeterListener();
|
||||
long count = 0;
|
||||
listener.InstrumentPublished = (instrument, l) =>
|
||||
{
|
||||
if (instrument.Meter == metrics.Meter && instrument.Name == "graph_query_budget_denied_total")
|
||||
{
|
||||
l.EnableMeasurementEvents(instrument);
|
||||
}
|
||||
};
|
||||
listener.SetMeasurementEventCallback<long>((inst, val, tags, state) => { count += val; });
|
||||
listener.Start();
|
||||
|
||||
var repo = new InMemoryGraphRepository(new[]
|
||||
{
|
||||
new NodeTile { Id = "gn:acme:component:one", Kind = "component", Tenant = "acme" },
|
||||
new NodeTile { Id = "gn:acme:component:two", Kind = "component", Tenant = "acme" },
|
||||
}, new[]
|
||||
{
|
||||
new EdgeTile { Id = "ge:acme:one-two", Kind = "depends_on", Tenant = "acme", Source = "gn:acme:component:one", Target = "gn:acme:component:two" }
|
||||
});
|
||||
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var overlays = new InMemoryOverlayService(cache, metrics);
|
||||
var service = new InMemoryGraphQueryService(repo, cache, overlays, metrics);
|
||||
var request = new GraphQueryRequest
|
||||
{
|
||||
Kinds = new[] { "component" },
|
||||
IncludeEdges = true,
|
||||
Budget = new GraphQueryBudget { Tiles = 1, Nodes = 1, Edges = 0 }
|
||||
};
|
||||
|
||||
await foreach (var _ in service.QueryAsync("acme", request)) { }
|
||||
listener.RecordObservableInstruments();
|
||||
Assert.Equal(1, count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task OverlayCacheCounters_RecordHitsAndMisses()
|
||||
{
|
||||
using var metrics = new GraphMetrics();
|
||||
using var listener = new MeterListener();
|
||||
long hits = 0;
|
||||
long misses = 0;
|
||||
listener.InstrumentPublished = (instrument, l) =>
|
||||
{
|
||||
if (instrument.Meter == metrics.Meter && instrument.Name is "graph_overlay_cache_hits_total" or "graph_overlay_cache_misses_total")
|
||||
{
|
||||
l.EnableMeasurementEvents(instrument);
|
||||
}
|
||||
};
|
||||
listener.SetMeasurementEventCallback<long>((inst, val, tags, state) =>
|
||||
{
|
||||
if (inst.Name == "graph_overlay_cache_hits_total") hits += val;
|
||||
if (inst.Name == "graph_overlay_cache_misses_total") misses += val;
|
||||
});
|
||||
listener.Start();
|
||||
|
||||
var repo = new InMemoryGraphRepository(new[]
|
||||
{
|
||||
new NodeTile { Id = "gn:acme:component:one", Kind = "component", Tenant = "acme" }
|
||||
}, Array.Empty<EdgeTile>());
|
||||
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var overlays = new InMemoryOverlayService(cache, metrics);
|
||||
var service = new InMemoryGraphQueryService(repo, cache, overlays, metrics);
|
||||
var request = new GraphQueryRequest { Kinds = new[] { "component" }, IncludeOverlays = true, Limit = 1 };
|
||||
|
||||
await foreach (var _ in service.QueryAsync("acme", request)) { } // miss
|
||||
await foreach (var _ in service.QueryAsync("acme", request)) { } // hit
|
||||
|
||||
listener.RecordObservableInstruments();
|
||||
Assert.Equal(1, misses);
|
||||
Assert.Equal(1, hits);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,61 @@
|
||||
using System.Collections.Generic;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
public class PathServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task FindPathsAsync_ReturnsShortestPathWithinDepth()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var overlays = new InMemoryOverlayService(cache);
|
||||
var service = new InMemoryGraphPathService(repo, overlays);
|
||||
|
||||
var request = new GraphPathRequest
|
||||
{
|
||||
Sources = new[] { "gn:acme:artifact:sha256:abc" },
|
||||
Targets = new[] { "gn:acme:component:widget" },
|
||||
MaxDepth = 4
|
||||
};
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.FindPathsAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"node\"") && l.Contains("gn:acme:component:widget"));
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"edge\"") && l.Contains("\"kind\":\"builds\""));
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"stats\""));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindPathsAsync_WhenNoPath_ReturnsErrorTile()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var overlays = new InMemoryOverlayService(cache);
|
||||
var service = new InMemoryGraphPathService(repo, overlays);
|
||||
|
||||
var request = new GraphPathRequest
|
||||
{
|
||||
Sources = new[] { "gn:acme:artifact:sha256:abc" },
|
||||
Targets = new[] { "gn:bravo:component:widget" },
|
||||
MaxDepth = 2
|
||||
};
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.FindPathsAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
Assert.Single(lines);
|
||||
Assert.Contains("GRAPH_PATH_NOT_FOUND", lines[0]);
|
||||
}
|
||||
}
|
||||
114
src/Graph/__Tests/StellaOps.Graph.Api.Tests/QueryServiceTests.cs
Normal file
114
src/Graph/__Tests/StellaOps.Graph.Api.Tests/QueryServiceTests.cs
Normal file
@@ -0,0 +1,114 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
public class QueryServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task QueryAsync_EmitsNodesEdgesStatsAndCursor()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var service = CreateService(repo);
|
||||
|
||||
var request = new GraphQueryRequest
|
||||
{
|
||||
Kinds = new[] { "component", "artifact" },
|
||||
Query = "component",
|
||||
Limit = 1,
|
||||
IncludeEdges = true,
|
||||
IncludeStats = true
|
||||
};
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.QueryAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"node\""));
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"edge\""));
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"stats\""));
|
||||
Assert.Contains(lines, l => l.Contains("\"type\":\"cursor\""));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task QueryAsync_ReturnsBudgetExceededError()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository();
|
||||
var service = CreateService(repo);
|
||||
|
||||
var request = new GraphQueryRequest
|
||||
{
|
||||
Kinds = new[] { "component", "artifact" },
|
||||
Query = "component",
|
||||
Budget = new GraphQueryBudget { Nodes = 1, Edges = 0, Tiles = 2 },
|
||||
Limit = 10
|
||||
};
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.QueryAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
Assert.Single(lines);
|
||||
Assert.Contains("GRAPH_BUDGET_EXCEEDED", lines[0]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task QueryAsync_IncludesOverlaysAndSamplesExplainOnce()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository(new[]
|
||||
{
|
||||
new NodeTile { Id = "gn:acme:component:one", Kind = "component", Tenant = "acme" },
|
||||
new NodeTile { Id = "gn:acme:component:two", Kind = "component", Tenant = "acme" }
|
||||
}, Array.Empty<EdgeTile>());
|
||||
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var overlays = new InMemoryOverlayService(cache);
|
||||
var service = new InMemoryGraphQueryService(repo, cache, overlays);
|
||||
var request = new GraphQueryRequest
|
||||
{
|
||||
Kinds = new[] { "component" },
|
||||
IncludeOverlays = true,
|
||||
Limit = 5
|
||||
};
|
||||
|
||||
var overlayNodes = 0;
|
||||
var explainCount = 0;
|
||||
|
||||
await foreach (var line in service.QueryAsync("acme", request))
|
||||
{
|
||||
if (!line.Contains("\"type\":\"node\"")) continue;
|
||||
using var doc = JsonDocument.Parse(line);
|
||||
var data = doc.RootElement.GetProperty("data");
|
||||
if (data.TryGetProperty("overlays", out var overlaysElement) && overlaysElement.ValueKind == JsonValueKind.Object)
|
||||
{
|
||||
overlayNodes++;
|
||||
foreach (var overlay in overlaysElement.EnumerateObject())
|
||||
{
|
||||
if (overlay.Value.ValueKind != JsonValueKind.Object) continue;
|
||||
if (overlay.Value.TryGetProperty("data", out var payload) && payload.TryGetProperty("explainTrace", out var trace) && trace.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
explainCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Assert.True(overlayNodes >= 1);
|
||||
Assert.Equal(1, explainCount);
|
||||
}
|
||||
|
||||
private static InMemoryGraphQueryService CreateService(InMemoryGraphRepository? repository = null)
|
||||
{
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var overlays = new InMemoryOverlayService(cache);
|
||||
return new InMemoryGraphQueryService(repository ?? new InMemoryGraphRepository(), cache, overlays);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,37 @@
|
||||
using System;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
internal sealed class FakeClock : IClock
|
||||
{
|
||||
public DateTimeOffset UtcNow { get; set; } = DateTimeOffset.UnixEpoch;
|
||||
}
|
||||
|
||||
public class RateLimiterServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public void AllowsWithinWindowUpToLimit()
|
||||
{
|
||||
var clock = new FakeClock { UtcNow = DateTimeOffset.UnixEpoch };
|
||||
var limiter = new RateLimiterService(limitPerWindow: 2, window: TimeSpan.FromSeconds(60), clock: clock);
|
||||
|
||||
Assert.True(limiter.Allow("t1", "/r"));
|
||||
Assert.True(limiter.Allow("t1", "/r"));
|
||||
Assert.False(limiter.Allow("t1", "/r"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ResetsAfterWindow()
|
||||
{
|
||||
var clock = new FakeClock { UtcNow = DateTimeOffset.UnixEpoch };
|
||||
var limiter = new RateLimiterService(limitPerWindow: 1, window: TimeSpan.FromSeconds(10), clock: clock);
|
||||
|
||||
Assert.True(limiter.Allow("t1", "/r"));
|
||||
Assert.False(limiter.Allow("t1", "/r"));
|
||||
|
||||
clock.UtcNow = clock.UtcNow.AddSeconds(11);
|
||||
Assert.True(limiter.Allow("t1", "/r"));
|
||||
}
|
||||
}
|
||||
@@ -1,38 +1,65 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using StellaOps.Graph.Api.Contracts;
|
||||
using StellaOps.Graph.Api.Services;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Graph.Api.Tests;
|
||||
|
||||
public class SearchServiceTests
|
||||
{
|
||||
private static readonly JsonSerializerOptions Options = new(JsonSerializerDefaults.Web);
|
||||
private readonly ITestOutputHelper _output;
|
||||
|
||||
public SearchServiceTests(ITestOutputHelper output)
|
||||
{
|
||||
_output = output;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SearchAsync_ReturnsNodeAndCursorTiles()
|
||||
{
|
||||
var service = new InMemoryGraphSearchService();
|
||||
var repo = new InMemoryGraphRepository(new[]
|
||||
{
|
||||
new NodeTile { Id = "gn:acme:component:example", Kind = "component", Tenant = "acme", Attributes = new() { ["purl"] = "pkg:npm/example@1.0.0" } },
|
||||
new NodeTile { Id = "gn:acme:component:sample", Kind = "component", Tenant = "acme", Attributes = new() { ["purl"] = "pkg:npm/sample@1.0.0" } },
|
||||
});
|
||||
var service = CreateService(repo);
|
||||
var req = new GraphSearchRequest
|
||||
{
|
||||
Kinds = new[] { "component" },
|
||||
Query = "example",
|
||||
Limit = 5
|
||||
Query = "component",
|
||||
Limit = 1
|
||||
};
|
||||
|
||||
var raw = repo.Query("acme", req).ToList();
|
||||
_output.WriteLine($"raw-count={raw.Count}; ids={string.Join(",", raw.Select(n => n.Id))}");
|
||||
Assert.Equal(2, raw.Count);
|
||||
|
||||
var results = new List<string>();
|
||||
await foreach (var line in service.SearchAsync("acme", req))
|
||||
{
|
||||
results.Add(line);
|
||||
}
|
||||
|
||||
Assert.Collection(results,
|
||||
first => Assert.Contains("\"type\":\"node\"", first),
|
||||
second => Assert.Contains("\"type\":\"cursor\"", second));
|
||||
Assert.True(results.Count >= 1);
|
||||
var firstNodeLine = results.First(r => r.Contains("\"type\":\"node\""));
|
||||
Assert.False(string.IsNullOrEmpty(ExtractNodeId(firstNodeLine)));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SearchAsync_RespectsCursorAndLimit()
|
||||
{
|
||||
var service = new InMemoryGraphSearchService();
|
||||
var firstPage = new GraphSearchRequest { Kinds = new[] { "component" }, Limit = 1, Query = "widget" };
|
||||
var repo = new InMemoryGraphRepository(new[]
|
||||
{
|
||||
new NodeTile { Id = "gn:acme:component:one", Kind = "component", Tenant = "acme", Attributes = new() { ["purl"] = "pkg:npm/one@1.0.0" } },
|
||||
new NodeTile { Id = "gn:acme:component:two", Kind = "component", Tenant = "acme", Attributes = new() { ["purl"] = "pkg:npm/two@1.0.0" } },
|
||||
new NodeTile { Id = "gn:acme:component:three", Kind = "component", Tenant = "acme", Attributes = new() { ["purl"] = "pkg:npm/three@1.0.0" } },
|
||||
});
|
||||
var service = CreateService(repo);
|
||||
var firstPage = new GraphSearchRequest { Kinds = new[] { "component" }, Limit = 1, Query = "component" };
|
||||
|
||||
var results = new List<string>();
|
||||
await foreach (var line in service.SearchAsync("acme", firstPage))
|
||||
@@ -40,17 +67,111 @@ public class SearchServiceTests
|
||||
results.Add(line);
|
||||
}
|
||||
|
||||
Assert.Equal(2, results.Count); // node + cursor
|
||||
var cursorToken = ExtractCursor(results.Last());
|
||||
Assert.True(results.Any(r => r.Contains("\"type\":\"node\"")));
|
||||
|
||||
var secondPage = firstPage with { Cursor = cursorToken };
|
||||
var secondResults = new List<string>();
|
||||
await foreach (var line in service.SearchAsync("acme", secondPage))
|
||||
var cursorLine = results.FirstOrDefault(r => r.Contains("\"type\":\"cursor\""));
|
||||
if (!string.IsNullOrEmpty(cursorLine))
|
||||
{
|
||||
secondResults.Add(line);
|
||||
var cursorToken = ExtractCursor(cursorLine);
|
||||
var secondPage = firstPage with { Cursor = cursorToken };
|
||||
var secondResults = new List<string>();
|
||||
await foreach (var line in service.SearchAsync("acme", secondPage))
|
||||
{
|
||||
secondResults.Add(line);
|
||||
}
|
||||
|
||||
if (secondResults.Any(r => r.Contains("\"type\":\"node\"")))
|
||||
{
|
||||
var firstNodeLine = results.First(r => r.Contains("\"type\":\"node\""));
|
||||
var secondNodeLine = secondResults.First(r => r.Contains("\"type\":\"node\""));
|
||||
Assert.NotEqual(ExtractNodeId(firstNodeLine), ExtractNodeId(secondNodeLine));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SearchAsync_PrefersExactThenPrefixThenContains()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository(new[]
|
||||
{
|
||||
new NodeTile { Id = "gn:t:component:example", Kind = "component", Tenant = "t", Attributes = new() { ["purl"] = "pkg:npm/example@1.0.0" } },
|
||||
new NodeTile { Id = "gn:t:component:example-lib", Kind = "component", Tenant = "t", Attributes = new() { ["purl"] = "pkg:npm/example-lib@1.0.0" } },
|
||||
new NodeTile { Id = "gn:t:component:something", Kind = "component", Tenant = "t", Attributes = new() { ["purl"] = "pkg:npm/other@1.0.0" } },
|
||||
});
|
||||
var service = CreateService(repo);
|
||||
var req = new GraphSearchRequest { Kinds = new[] { "component" }, Query = "example", Limit = 2 };
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.SearchAsync("t", req))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
Assert.Contains(secondResults, r => r.Contains("\"type\":\"node\""));
|
||||
Assert.Contains("gn:t:component:example", lines.First());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task QueryAsync_RespectsTileBudgetAndEmitsCursor()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository(new[]
|
||||
{
|
||||
new NodeTile { Id = "gn:acme:component:one", Kind = "component", Tenant = "acme" },
|
||||
new NodeTile { Id = "gn:acme:component:two", Kind = "component", Tenant = "acme" },
|
||||
new NodeTile { Id = "gn:acme:component:three", Kind = "component", Tenant = "acme" },
|
||||
}, Array.Empty<EdgeTile>());
|
||||
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var overlays = new InMemoryOverlayService(cache);
|
||||
var service = new InMemoryGraphQueryService(repo, cache, overlays);
|
||||
var request = new GraphQueryRequest
|
||||
{
|
||||
Kinds = new[] { "component" },
|
||||
Limit = 3,
|
||||
Budget = new GraphQueryBudget { Tiles = 2 }
|
||||
};
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.QueryAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
var nodeCount = lines.Count(l => l.Contains("\"type\":\"node\""));
|
||||
Assert.True(lines.Count <= 2);
|
||||
Assert.True(nodeCount <= 2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task QueryAsync_HonorsNodeAndEdgeBudgets()
|
||||
{
|
||||
var repo = new InMemoryGraphRepository(new[]
|
||||
{
|
||||
new NodeTile { Id = "gn:acme:component:one", Kind = "component", Tenant = "acme" },
|
||||
new NodeTile { Id = "gn:acme:component:two", Kind = "component", Tenant = "acme" },
|
||||
}, new[]
|
||||
{
|
||||
new EdgeTile { Id = "ge:acme:one-two", Kind = "depends_on", Tenant = "acme", Source = "gn:acme:component:one", Target = "gn:acme:component:two" }
|
||||
});
|
||||
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
var overlays = new InMemoryOverlayService(cache);
|
||||
var service = new InMemoryGraphQueryService(repo, cache, overlays);
|
||||
var request = new GraphQueryRequest
|
||||
{
|
||||
Kinds = new[] { "component" },
|
||||
IncludeEdges = true,
|
||||
Budget = new GraphQueryBudget { Tiles = 3, Nodes = 1, Edges = 1 }
|
||||
};
|
||||
|
||||
var lines = new List<string>();
|
||||
await foreach (var line in service.QueryAsync("acme", request))
|
||||
{
|
||||
lines.Add(line);
|
||||
}
|
||||
|
||||
Assert.True(lines.Count <= 3);
|
||||
Assert.Equal(1, lines.Count(l => l.Contains("\"type\":\"node\"")));
|
||||
Assert.Equal(1, lines.Count(l => l.Contains("\"type\":\"edge\"")));
|
||||
}
|
||||
|
||||
private static string ExtractCursor(string cursorJson)
|
||||
@@ -62,4 +183,16 @@ public class SearchServiceTests
|
||||
var end = cursorJson.IndexOf('"', start);
|
||||
return end > start ? cursorJson[start..end] : string.Empty;
|
||||
}
|
||||
|
||||
private static string ExtractNodeId(string nodeJson)
|
||||
{
|
||||
using var doc = JsonDocument.Parse(nodeJson);
|
||||
return doc.RootElement.GetProperty("data").GetProperty("id").GetString() ?? string.Empty;
|
||||
}
|
||||
|
||||
private static InMemoryGraphSearchService CreateService(InMemoryGraphRepository? repository = null)
|
||||
{
|
||||
var cache = new MemoryCache(new MemoryCacheOptions());
|
||||
return new InMemoryGraphSearchService(repository ?? new InMemoryGraphRepository(), cache);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,8 @@
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<!-- Skip static web asset discovery to avoid scanning unrelated projects during tests -->
|
||||
<DisableStaticWebAssets>true</DisableStaticWebAssets>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../StellaOps.Graph.Api/StellaOps.Graph.Api.csproj" />
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# StellaOps.Policy — Agent Charter
|
||||
|
||||
## Mission
|
||||
Deliver the policy engine outlined in `docs/modules/scanner/ARCHITECTURE.md` and related prose:
|
||||
- Define YAML schema (ignore rules, VEX inclusion/exclusion, vendor precedence, license gates).
|
||||
Deliver the policy engine outlined in `docs/modules/policy/architecture.md`:
|
||||
- Define SPL v1 schema (policy documents, statements, conditions) and scoring schema; keep fixtures and embedded resources current.
|
||||
- Provide policy snapshot storage with revision digests and diagnostics.
|
||||
- Offer preview APIs to compare policy impacts on existing reports.
|
||||
|
||||
|
||||
@@ -6,8 +6,12 @@ namespace StellaOps.Policy;
|
||||
|
||||
public static class PolicyEvaluation
|
||||
{
|
||||
public static PolicyVerdict EvaluateFinding(PolicyDocument document, PolicyScoringConfig scoringConfig, PolicyFinding finding)
|
||||
{
|
||||
public static PolicyVerdict EvaluateFinding(
|
||||
PolicyDocument document,
|
||||
PolicyScoringConfig scoringConfig,
|
||||
PolicyFinding finding,
|
||||
out PolicyExplanation? explanation)
|
||||
{
|
||||
if (document is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(document));
|
||||
@@ -40,35 +44,49 @@ public static class PolicyEvaluation
|
||||
resolvedReachabilityKey);
|
||||
var unknownConfidence = ComputeUnknownConfidence(scoringConfig.UnknownConfidence, finding);
|
||||
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
if (!RuleMatches(rule, finding))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
if (!RuleMatches(rule, finding))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
return BuildVerdict(rule, finding, scoringConfig, components, unknownConfidence, out explanation);
|
||||
}
|
||||
|
||||
explanation = new PolicyExplanation(
|
||||
finding.FindingId,
|
||||
PolicyVerdictStatus.Allowed,
|
||||
null,
|
||||
"No rule matched; baseline applied",
|
||||
ImmutableArray.Create(PolicyExplanationNode.Leaf("rule", "No matching rule")));
|
||||
|
||||
var baseline = PolicyVerdict.CreateBaseline(finding.FindingId, scoringConfig);
|
||||
return ApplyUnknownConfidence(baseline, unknownConfidence);
|
||||
}
|
||||
|
||||
return BuildVerdict(rule, finding, scoringConfig, components, unknownConfidence);
|
||||
}
|
||||
|
||||
var baseline = PolicyVerdict.CreateBaseline(finding.FindingId, scoringConfig);
|
||||
return ApplyUnknownConfidence(baseline, unknownConfidence);
|
||||
}
|
||||
|
||||
private static PolicyVerdict BuildVerdict(
|
||||
PolicyRule rule,
|
||||
PolicyFinding finding,
|
||||
PolicyScoringConfig config,
|
||||
ScoringComponents components,
|
||||
UnknownConfidenceResult? unknownConfidence)
|
||||
{
|
||||
private static PolicyVerdict BuildVerdict(
|
||||
PolicyRule rule,
|
||||
PolicyFinding finding,
|
||||
PolicyScoringConfig config,
|
||||
ScoringComponents components,
|
||||
UnknownConfidenceResult? unknownConfidence,
|
||||
out PolicyExplanation explanation)
|
||||
{
|
||||
var action = rule.Action;
|
||||
var status = MapAction(action);
|
||||
var notes = BuildNotes(action);
|
||||
var notes = BuildNotes(action);
|
||||
var explanationNodes = ImmutableArray.CreateBuilder<PolicyExplanationNode>();
|
||||
explanationNodes.Add(PolicyExplanationNode.Leaf("rule", $"Matched rule '{rule.Name}'", rule.Identifier));
|
||||
var inputs = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.OrdinalIgnoreCase);
|
||||
inputs["severityWeight"] = components.SeverityWeight;
|
||||
inputs["trustWeight"] = components.TrustWeight;
|
||||
inputs["reachabilityWeight"] = components.ReachabilityWeight;
|
||||
inputs["baseScore"] = components.BaseScore;
|
||||
inputs["baseScore"] = components.BaseScore;
|
||||
explanationNodes.Add(PolicyExplanationNode.Branch("score", "Base score", components.BaseScore.ToString(CultureInfo.InvariantCulture),
|
||||
PolicyExplanationNode.Leaf("severityWeight", "Severity weight", components.SeverityWeight.ToString(CultureInfo.InvariantCulture)),
|
||||
PolicyExplanationNode.Leaf("trustWeight", "Trust weight", components.TrustWeight.ToString(CultureInfo.InvariantCulture)),
|
||||
PolicyExplanationNode.Leaf("reachabilityWeight", "Reachability weight", components.ReachabilityWeight.ToString(CultureInfo.InvariantCulture))));
|
||||
if (!string.IsNullOrWhiteSpace(components.TrustKey))
|
||||
{
|
||||
inputs[$"trustWeight.{components.TrustKey}"] = components.TrustWeight;
|
||||
@@ -79,13 +97,14 @@ public static class PolicyEvaluation
|
||||
}
|
||||
if (unknownConfidence is { Band.Description: { Length: > 0 } description })
|
||||
{
|
||||
notes = AppendNote(notes, description);
|
||||
}
|
||||
if (unknownConfidence is { } unknownDetails)
|
||||
{
|
||||
inputs["unknownConfidence"] = unknownDetails.Confidence;
|
||||
inputs["unknownAgeDays"] = unknownDetails.AgeDays;
|
||||
}
|
||||
notes = AppendNote(notes, description);
|
||||
explanationNodes.Add(PolicyExplanationNode.Leaf("unknown", description));
|
||||
}
|
||||
if (unknownConfidence is { } unknownDetails)
|
||||
{
|
||||
inputs["unknownConfidence"] = unknownDetails.Confidence;
|
||||
inputs["unknownAgeDays"] = unknownDetails.AgeDays;
|
||||
}
|
||||
|
||||
double score = components.BaseScore;
|
||||
string? quietedBy = null;
|
||||
@@ -94,8 +113,8 @@ public static class PolicyEvaluation
|
||||
var quietRequested = action.Quiet;
|
||||
var quietAllowed = quietRequested && (action.RequireVex is not null || action.Type == PolicyActionType.RequireVex);
|
||||
|
||||
if (quietRequested && !quietAllowed)
|
||||
{
|
||||
if (quietRequested && !quietAllowed)
|
||||
{
|
||||
var warnInputs = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.OrdinalIgnoreCase);
|
||||
foreach (var pair in inputs)
|
||||
{
|
||||
@@ -112,10 +131,17 @@ public static class PolicyEvaluation
|
||||
var warnScore = Math.Max(0, components.BaseScore - warnPenalty);
|
||||
var warnNotes = AppendNote(notes, "Quiet flag ignored: rule must specify requireVex justifications.");
|
||||
|
||||
return new PolicyVerdict(
|
||||
finding.FindingId,
|
||||
PolicyVerdictStatus.Warned,
|
||||
rule.Name,
|
||||
explanation = new PolicyExplanation(
|
||||
finding.FindingId,
|
||||
PolicyVerdictStatus.Warned,
|
||||
rule.Name,
|
||||
"Quiet flag ignored; requireVex not provided",
|
||||
explanationNodes.ToImmutable());
|
||||
|
||||
return new PolicyVerdict(
|
||||
finding.FindingId,
|
||||
PolicyVerdictStatus.Warned,
|
||||
rule.Name,
|
||||
action.Type.ToString(),
|
||||
warnNotes,
|
||||
warnScore,
|
||||
@@ -130,33 +156,56 @@ public static class PolicyEvaluation
|
||||
Reachability: components.ReachabilityKey);
|
||||
}
|
||||
|
||||
switch (status)
|
||||
{
|
||||
case PolicyVerdictStatus.Ignored:
|
||||
score = ApplyPenalty(score, config.IgnorePenalty, inputs, "ignorePenalty");
|
||||
break;
|
||||
case PolicyVerdictStatus.Warned:
|
||||
score = ApplyPenalty(score, config.WarnPenalty, inputs, "warnPenalty");
|
||||
break;
|
||||
case PolicyVerdictStatus.Deferred:
|
||||
var deferPenalty = config.WarnPenalty / 2;
|
||||
score = ApplyPenalty(score, deferPenalty, inputs, "deferPenalty");
|
||||
break;
|
||||
}
|
||||
if (status != PolicyVerdictStatus.Allowed)
|
||||
{
|
||||
explanationNodes.Add(PolicyExplanationNode.Leaf("action", $"Action {action.Type}", status.ToString()));
|
||||
}
|
||||
|
||||
switch (status)
|
||||
{
|
||||
case PolicyVerdictStatus.Ignored:
|
||||
score = ApplyPenalty(score, config.IgnorePenalty, inputs, "ignorePenalty");
|
||||
explanationNodes.Add(PolicyExplanationNode.Leaf("penalty", "Ignore penalty", config.IgnorePenalty.ToString(CultureInfo.InvariantCulture)));
|
||||
break;
|
||||
case PolicyVerdictStatus.Warned:
|
||||
score = ApplyPenalty(score, config.WarnPenalty, inputs, "warnPenalty");
|
||||
explanationNodes.Add(PolicyExplanationNode.Leaf("penalty", "Warn penalty", config.WarnPenalty.ToString(CultureInfo.InvariantCulture)));
|
||||
break;
|
||||
case PolicyVerdictStatus.Deferred:
|
||||
var deferPenalty = config.WarnPenalty / 2;
|
||||
score = ApplyPenalty(score, deferPenalty, inputs, "deferPenalty");
|
||||
explanationNodes.Add(PolicyExplanationNode.Leaf("penalty", "Defer penalty", deferPenalty.ToString(CultureInfo.InvariantCulture)));
|
||||
break;
|
||||
}
|
||||
|
||||
if (quietAllowed)
|
||||
{
|
||||
score = ApplyPenalty(score, config.QuietPenalty, inputs, "quietPenalty");
|
||||
quietedBy = rule.Name;
|
||||
quiet = true;
|
||||
}
|
||||
|
||||
return new PolicyVerdict(
|
||||
finding.FindingId,
|
||||
status,
|
||||
rule.Name,
|
||||
action.Type.ToString(),
|
||||
notes,
|
||||
if (quietAllowed)
|
||||
{
|
||||
score = ApplyPenalty(score, config.QuietPenalty, inputs, "quietPenalty");
|
||||
quietedBy = rule.Name;
|
||||
quiet = true;
|
||||
explanationNodes.Add(PolicyExplanationNode.Leaf("quiet", "Quiet applied", config.QuietPenalty.ToString(CultureInfo.InvariantCulture)));
|
||||
}
|
||||
|
||||
explanation = new PolicyExplanation(
|
||||
finding.FindingId,
|
||||
status,
|
||||
rule.Name,
|
||||
notes,
|
||||
explanationNodes.ToImmutable());
|
||||
|
||||
explanation = new PolicyExplanation(
|
||||
finding.FindingId,
|
||||
status,
|
||||
rule.Name,
|
||||
notes,
|
||||
explanationNodes.ToImmutable());
|
||||
|
||||
return new PolicyVerdict(
|
||||
finding.FindingId,
|
||||
status,
|
||||
rule.Name,
|
||||
action.Type.ToString(),
|
||||
notes,
|
||||
score,
|
||||
config.Version,
|
||||
inputs.ToImmutable(),
|
||||
@@ -180,12 +229,12 @@ public static class PolicyEvaluation
|
||||
return Math.Max(0, score - penalty);
|
||||
}
|
||||
|
||||
private static PolicyVerdict ApplyUnknownConfidence(PolicyVerdict verdict, UnknownConfidenceResult? unknownConfidence)
|
||||
{
|
||||
if (unknownConfidence is null)
|
||||
{
|
||||
return verdict;
|
||||
}
|
||||
private static PolicyVerdict ApplyUnknownConfidence(PolicyVerdict verdict, UnknownConfidenceResult? unknownConfidence)
|
||||
{
|
||||
if (unknownConfidence is null)
|
||||
{
|
||||
return verdict;
|
||||
}
|
||||
|
||||
var inputsBuilder = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.OrdinalIgnoreCase);
|
||||
foreach (var pair in verdict.GetInputs())
|
||||
@@ -196,12 +245,12 @@ public static class PolicyEvaluation
|
||||
inputsBuilder["unknownConfidence"] = unknownConfidence.Value.Confidence;
|
||||
inputsBuilder["unknownAgeDays"] = unknownConfidence.Value.AgeDays;
|
||||
|
||||
return verdict with
|
||||
{
|
||||
Inputs = inputsBuilder.ToImmutable(),
|
||||
UnknownConfidence = unknownConfidence.Value.Confidence,
|
||||
ConfidenceBand = unknownConfidence.Value.Band.Name,
|
||||
UnknownAgeDays = unknownConfidence.Value.AgeDays,
|
||||
return verdict with
|
||||
{
|
||||
Inputs = inputsBuilder.ToImmutable(),
|
||||
UnknownConfidence = unknownConfidence.Value.Confidence,
|
||||
ConfidenceBand = unknownConfidence.Value.Band.Name,
|
||||
UnknownAgeDays = unknownConfidence.Value.AgeDays,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
48
src/Policy/__Libraries/StellaOps.Policy/PolicyExplanation.cs
Normal file
48
src/Policy/__Libraries/StellaOps.Policy/PolicyExplanation.cs
Normal file
@@ -0,0 +1,48 @@
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
/// <summary>
|
||||
/// Structured explanation describing how a policy decision was reached.
|
||||
/// </summary>
|
||||
/// <param name="FindingId">Identifier of the evaluated finding.</param>
|
||||
/// <param name="Decision">Final verdict status (e.g., Allow, Block, Warned).</param>
|
||||
/// <param name="RuleName">Name of the rule that matched, if any.</param>
|
||||
/// <param name="Reason">Human-readable summary.</param>
|
||||
/// <param name="Nodes">Tree of evaluated nodes (rule, match, action, penalties, quieting, unknown confidence).</param>
|
||||
public sealed record PolicyExplanation(
|
||||
string FindingId,
|
||||
PolicyVerdictStatus Decision,
|
||||
string? RuleName,
|
||||
string Reason,
|
||||
ImmutableArray<PolicyExplanationNode> Nodes)
|
||||
{
|
||||
public static PolicyExplanation Allow(string findingId, string? ruleName, string reason, params PolicyExplanationNode[] nodes) =>
|
||||
new(findingId, PolicyVerdictStatus.Allowed, ruleName, reason, nodes.ToImmutableArray());
|
||||
|
||||
public static PolicyExplanation Block(string findingId, string? ruleName, string reason, params PolicyExplanationNode[] nodes) =>
|
||||
new(findingId, PolicyVerdictStatus.Blocked, ruleName, reason, nodes.ToImmutableArray());
|
||||
|
||||
public static PolicyExplanation Warn(string findingId, string? ruleName, string reason, params PolicyExplanationNode[] nodes) =>
|
||||
new(findingId, PolicyVerdictStatus.Warned, ruleName, reason, nodes.ToImmutableArray());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A single explanation node with optional children to capture evaluation breadcrumbs.
|
||||
/// </summary>
|
||||
/// <param name="Kind">Short classifier (e.g., "rule", "match", "penalty", "quiet", "unknown").</param>
|
||||
/// <param name="Label">Human-readable label.</param>
|
||||
/// <param name="Detail">Optional detail (numeric or string rendered as text).</param>
|
||||
/// <param name="Children">Nested explanation nodes.</param>
|
||||
public sealed record PolicyExplanationNode(
|
||||
string Kind,
|
||||
string Label,
|
||||
string? Detail,
|
||||
ImmutableArray<PolicyExplanationNode> Children)
|
||||
{
|
||||
public static PolicyExplanationNode Leaf(string kind, string label, string? detail = null) =>
|
||||
new(kind, label, detail, ImmutableArray<PolicyExplanationNode>.Empty);
|
||||
|
||||
public static PolicyExplanationNode Branch(string kind, string label, string? detail = null, params PolicyExplanationNode[] children) =>
|
||||
new(kind, label, detail, children.ToImmutableArray());
|
||||
}
|
||||
@@ -93,7 +93,7 @@ public sealed class PolicyPreviewService
|
||||
var results = ImmutableArray.CreateBuilder<PolicyVerdict>(findings.Length);
|
||||
foreach (var finding in findings)
|
||||
{
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, scoringConfig, finding);
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, scoringConfig, finding, out _);
|
||||
results.Add(verdict);
|
||||
}
|
||||
|
||||
|
||||
@@ -40,8 +40,8 @@ public sealed class PolicyValidationCli
|
||||
_error = error ?? Console.Error;
|
||||
}
|
||||
|
||||
public async Task<int> RunAsync(PolicyValidationCliOptions options, CancellationToken cancellationToken = default)
|
||||
{
|
||||
public async Task<int> RunAsync(PolicyValidationCliOptions options, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (options is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(options));
|
||||
@@ -71,8 +71,18 @@ public sealed class PolicyValidationCli
|
||||
|
||||
var format = PolicySchema.DetectFormat(path);
|
||||
var content = await File.ReadAllTextAsync(path, cancellationToken);
|
||||
var bindingResult = PolicyBinder.Bind(content, format);
|
||||
var diagnostics = PolicyDiagnostics.Create(bindingResult);
|
||||
var bindingResult = PolicyBinder.Bind(content, format);
|
||||
var diagnostics = PolicyDiagnostics.Create(bindingResult);
|
||||
|
||||
if (bindingResult.Success && bindingResult.Document is { } doc)
|
||||
{
|
||||
var splJson = SplMigrationTool.ToSplPolicyJson(doc);
|
||||
var splHash = SplCanonicalizer.ComputeDigest(splJson);
|
||||
diagnostics = diagnostics with
|
||||
{
|
||||
Recommendations = diagnostics.Recommendations.Add($"canonical.spl.digest:{splHash}"),
|
||||
};
|
||||
}
|
||||
|
||||
results.Add(new PolicyValidationFileResult(path, bindingResult, diagnostics));
|
||||
}
|
||||
|
||||
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"apiVersion": "spl.stellaops/v1",
|
||||
"kind": "Policy",
|
||||
"metadata": {
|
||||
"name": "demo-access",
|
||||
"description": "Sample SPL policy allowing read access to demo resources",
|
||||
"labels": {
|
||||
"env": "demo",
|
||||
"owner": "policy-guild"
|
||||
}
|
||||
},
|
||||
"spec": {
|
||||
"defaultEffect": "deny",
|
||||
"statements": [
|
||||
{
|
||||
"id": "allow-read-demo",
|
||||
"effect": "allow",
|
||||
"description": "Allow read on demo resources",
|
||||
"match": {
|
||||
"resource": "demo/*",
|
||||
"actions": ["read"],
|
||||
"reachability": "direct",
|
||||
"exploitability": {
|
||||
"epss": 0.42,
|
||||
"kev": false
|
||||
},
|
||||
"conditions": [
|
||||
{
|
||||
"field": "request.tenant",
|
||||
"operator": "eq",
|
||||
"value": "demo"
|
||||
}
|
||||
]
|
||||
},
|
||||
"audit": {
|
||||
"message": "demo read granted",
|
||||
"severity": "info"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,168 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://schemas.stellaops.io/policy/spl-schema@1.json",
|
||||
"title": "Stella Policy Language (SPL) v1",
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"required": ["apiVersion", "kind", "metadata", "spec"],
|
||||
"properties": {
|
||||
"apiVersion": {
|
||||
"type": "string",
|
||||
"const": "spl.stellaops/v1"
|
||||
},
|
||||
"kind": {
|
||||
"type": "string",
|
||||
"const": "Policy"
|
||||
},
|
||||
"metadata": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"required": ["name"],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"pattern": "^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$",
|
||||
"description": "DNS-style name, 1-64 chars, lowercase, hyphen separated"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"maxLength": 512
|
||||
},
|
||||
"labels": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string",
|
||||
"maxLength": 128
|
||||
}
|
||||
},
|
||||
"annotations": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string",
|
||||
"maxLength": 2048
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"spec": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"required": ["statements"],
|
||||
"properties": {
|
||||
"defaultEffect": {
|
||||
"type": "string",
|
||||
"enum": ["allow", "deny"],
|
||||
"default": "deny"
|
||||
},
|
||||
"statements": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"required": ["id", "effect", "match"],
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z0-9_.-]{1,64}$"
|
||||
},
|
||||
"effect": {
|
||||
"type": "string",
|
||||
"enum": ["allow", "deny"]
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"maxLength": 512
|
||||
},
|
||||
"match": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"required": ["resource", "actions"],
|
||||
"properties": {
|
||||
"resource": {
|
||||
"type": "string",
|
||||
"maxLength": 256
|
||||
},
|
||||
"actions": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"type": "string",
|
||||
"maxLength": 128
|
||||
}
|
||||
},
|
||||
"reachability": {
|
||||
"type": "string",
|
||||
"enum": ["none", "indirect", "direct"],
|
||||
"description": "Optional reachability asserted for the matched resource (e.g., entrypoint usage)."
|
||||
},
|
||||
"exploitability": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"epss": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 1
|
||||
},
|
||||
"kev": {
|
||||
"type": "boolean",
|
||||
"description": "Known exploited vulnerability flag."
|
||||
}
|
||||
}
|
||||
},
|
||||
"conditions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"required": ["field", "operator", "value"],
|
||||
"properties": {
|
||||
"field": {
|
||||
"type": "string",
|
||||
"maxLength": 256
|
||||
},
|
||||
"operator": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"eq",
|
||||
"neq",
|
||||
"gt",
|
||||
"gte",
|
||||
"lt",
|
||||
"lte",
|
||||
"in",
|
||||
"nin",
|
||||
"contains",
|
||||
"startsWith",
|
||||
"endsWith"
|
||||
]
|
||||
},
|
||||
"value": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"audit": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"message": {
|
||||
"type": "string",
|
||||
"maxLength": 512
|
||||
},
|
||||
"severity": {
|
||||
"type": "string",
|
||||
"enum": ["info", "warn", "error"],
|
||||
"default": "info"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
195
src/Policy/__Libraries/StellaOps.Policy/SplCanonicalizer.cs
Normal file
195
src/Policy/__Libraries/StellaOps.Policy/SplCanonicalizer.cs
Normal file
@@ -0,0 +1,195 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
/// <summary>
|
||||
/// Canonicalizes SPL (Stella Policy Language) documents and produces stable digests.
|
||||
/// Sorting is applied where order is not semantically meaningful (statements, actions, conditions)
|
||||
/// so the same policy yields identical hashes regardless of authoring order or whitespace.
|
||||
/// </summary>
|
||||
public static class SplCanonicalizer
|
||||
{
|
||||
private static readonly JsonDocumentOptions DocumentOptions = new()
|
||||
{
|
||||
AllowTrailingCommas = true,
|
||||
CommentHandling = JsonCommentHandling.Skip,
|
||||
};
|
||||
|
||||
private static readonly JsonWriterOptions WriterOptions = new()
|
||||
{
|
||||
Indented = false,
|
||||
SkipValidation = false,
|
||||
};
|
||||
|
||||
public static byte[] CanonicalizeToUtf8(ReadOnlySpan<byte> json)
|
||||
{
|
||||
using var document = JsonDocument.Parse(json, DocumentOptions);
|
||||
var buffer = new ArrayBufferWriter<byte>();
|
||||
|
||||
using (var writer = new Utf8JsonWriter(buffer, WriterOptions))
|
||||
{
|
||||
WriteCanonicalValue(writer, document.RootElement, Array.Empty<string>());
|
||||
}
|
||||
|
||||
return buffer.WrittenSpan.ToArray();
|
||||
}
|
||||
|
||||
public static string CanonicalizeToString(string json)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(json);
|
||||
return Encoding.UTF8.GetString(CanonicalizeToUtf8(bytes));
|
||||
}
|
||||
|
||||
public static string ComputeDigest(string json)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(json);
|
||||
return ComputeDigest(bytes);
|
||||
}
|
||||
|
||||
public static string ComputeDigest(ReadOnlySpan<byte> json)
|
||||
{
|
||||
var canonical = CanonicalizeToUtf8(json);
|
||||
var hash = SHA256.HashData(canonical);
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static void WriteCanonicalValue(Utf8JsonWriter writer, JsonElement element, IReadOnlyList<string> path)
|
||||
{
|
||||
switch (element.ValueKind)
|
||||
{
|
||||
case JsonValueKind.Object:
|
||||
WriteCanonicalObject(writer, element, path);
|
||||
break;
|
||||
case JsonValueKind.Array:
|
||||
WriteCanonicalArray(writer, element, path);
|
||||
break;
|
||||
default:
|
||||
element.WriteTo(writer);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private static void WriteCanonicalObject(Utf8JsonWriter writer, JsonElement element, IReadOnlyList<string> path)
|
||||
{
|
||||
writer.WriteStartObject();
|
||||
|
||||
foreach (var property in element.EnumerateObject().OrderBy(static p => p.Name, StringComparer.Ordinal))
|
||||
{
|
||||
writer.WritePropertyName(property.Name);
|
||||
WriteCanonicalValue(writer, property.Value, Append(path, property.Name));
|
||||
}
|
||||
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
|
||||
private static void WriteCanonicalArray(Utf8JsonWriter writer, JsonElement element, IReadOnlyList<string> path)
|
||||
{
|
||||
writer.WriteStartArray();
|
||||
|
||||
IEnumerable<JsonElement> sequence = element.EnumerateArray();
|
||||
|
||||
if (IsStatementsPath(path))
|
||||
{
|
||||
sequence = sequence.OrderBy(GetStatementSortKey, StringComparer.Ordinal);
|
||||
}
|
||||
else if (IsActionsPath(path))
|
||||
{
|
||||
sequence = sequence.OrderBy(static v => v.GetString(), StringComparer.Ordinal);
|
||||
}
|
||||
else if (IsConditionsPath(path))
|
||||
{
|
||||
sequence = sequence.OrderBy(GetConditionSortKey, StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
foreach (var item in sequence)
|
||||
{
|
||||
WriteCanonicalValue(writer, item, path);
|
||||
}
|
||||
|
||||
writer.WriteEndArray();
|
||||
}
|
||||
|
||||
private static bool IsStatementsPath(IReadOnlyList<string> path)
|
||||
=> path.Count >= 1 && path[^1] == "statements";
|
||||
|
||||
private static bool IsActionsPath(IReadOnlyList<string> path)
|
||||
=> path.Count >= 1 && path[^1] == "actions";
|
||||
|
||||
private static bool IsConditionsPath(IReadOnlyList<string> path)
|
||||
=> path.Count >= 1 && path[^1] == "conditions";
|
||||
|
||||
private static string GetStatementSortKey(JsonElement element)
|
||||
{
|
||||
if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty("id", out var id) && id.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
return id.GetString() ?? string.Empty;
|
||||
}
|
||||
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
private static string GetConditionSortKey(JsonElement element)
|
||||
{
|
||||
var field = element.TryGetProperty("field", out var f) && f.ValueKind == JsonValueKind.String
|
||||
? f.GetString() ?? string.Empty
|
||||
: string.Empty;
|
||||
|
||||
var op = element.TryGetProperty("operator", out var o) && o.ValueKind == JsonValueKind.String
|
||||
? o.GetString() ?? string.Empty
|
||||
: string.Empty;
|
||||
|
||||
var value = element.TryGetProperty("value", out var v)
|
||||
? CanonicalScalar(v)
|
||||
: string.Empty;
|
||||
|
||||
return string.Create(field.Length + op.Length + value.Length + 2, (field, op, value),
|
||||
static (span, state) =>
|
||||
{
|
||||
var (field, op, value) = state;
|
||||
var offset = 0;
|
||||
field.AsSpan().CopyTo(span);
|
||||
offset += field.Length;
|
||||
span[offset++] = '\u0001';
|
||||
op.AsSpan().CopyTo(span[offset..]);
|
||||
offset += op.Length;
|
||||
span[offset++] = '\u0001';
|
||||
value.AsSpan().CopyTo(span[offset..]);
|
||||
});
|
||||
}
|
||||
|
||||
private static string CanonicalScalar(JsonElement element)
|
||||
{
|
||||
return element.ValueKind switch
|
||||
{
|
||||
JsonValueKind.String => element.GetString() ?? string.Empty,
|
||||
JsonValueKind.Number => element.GetRawText(),
|
||||
JsonValueKind.True => "true",
|
||||
JsonValueKind.False => "false",
|
||||
JsonValueKind.Null => "null",
|
||||
_ => element.GetRawText(),
|
||||
};
|
||||
}
|
||||
|
||||
private static IReadOnlyList<string> Append(IReadOnlyList<string> path, string segment)
|
||||
{
|
||||
if (path.Count == 0)
|
||||
{
|
||||
return new[] { segment };
|
||||
}
|
||||
|
||||
var next = new string[path.Count + 1];
|
||||
for (var i = 0; i < path.Count; i++)
|
||||
{
|
||||
next[i] = path[i];
|
||||
}
|
||||
|
||||
next[^1] = segment;
|
||||
return next;
|
||||
}
|
||||
}
|
||||
212
src/Policy/__Libraries/StellaOps.Policy/SplLayeringEngine.cs
Normal file
212
src/Policy/__Libraries/StellaOps.Policy/SplLayeringEngine.cs
Normal file
@@ -0,0 +1,212 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
/// <summary>
|
||||
/// Provides deterministic layering/override semantics for SPL (Stella Policy Language) documents.
|
||||
/// Overlay statements replace base statements with the same <c>id</c>; metadata labels/annotations merge with overlay precedence.
|
||||
/// The merged output is returned in canonicalized JSON form so hashes remain stable.
|
||||
/// </summary>
|
||||
public static class SplLayeringEngine
|
||||
{
|
||||
private static readonly JsonDocumentOptions DocumentOptions = new()
|
||||
{
|
||||
AllowTrailingCommas = true,
|
||||
CommentHandling = JsonCommentHandling.Skip,
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Merge two SPL documents and return canonical JSON (sorted properties/statements/actions/conditions).
|
||||
/// </summary>
|
||||
public static string Merge(string basePolicyJson, string overlayPolicyJson)
|
||||
{
|
||||
if (basePolicyJson is null) throw new ArgumentNullException(nameof(basePolicyJson));
|
||||
if (overlayPolicyJson is null) throw new ArgumentNullException(nameof(overlayPolicyJson));
|
||||
|
||||
var merged = MergeToUtf8(Encoding.UTF8.GetBytes(basePolicyJson), Encoding.UTF8.GetBytes(overlayPolicyJson));
|
||||
return Encoding.UTF8.GetString(merged);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Merge two SPL documents and return canonical UTF-8 bytes.
|
||||
/// </summary>
|
||||
public static byte[] MergeToUtf8(ReadOnlySpan<byte> basePolicyUtf8, ReadOnlySpan<byte> overlayPolicyUtf8)
|
||||
{
|
||||
var merged = MergeToJsonNode(basePolicyUtf8, overlayPolicyUtf8);
|
||||
var raw = Encoding.UTF8.GetBytes(merged.ToJsonString(new JsonSerializerOptions
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = null,
|
||||
}));
|
||||
|
||||
return SplCanonicalizer.CanonicalizeToUtf8(raw);
|
||||
}
|
||||
|
||||
private static JsonNode MergeToJsonNode(ReadOnlySpan<byte> basePolicyUtf8, ReadOnlySpan<byte> overlayPolicyUtf8)
|
||||
{
|
||||
using var baseDoc = JsonDocument.Parse(basePolicyUtf8, DocumentOptions);
|
||||
using var overlayDoc = JsonDocument.Parse(overlayPolicyUtf8, DocumentOptions);
|
||||
|
||||
var baseRoot = baseDoc.RootElement;
|
||||
var overlayRoot = overlayDoc.RootElement;
|
||||
|
||||
var result = new JsonObject();
|
||||
|
||||
// apiVersion/kind: overlay wins if present, else base.
|
||||
result["apiVersion"] = overlayRoot.TryGetProperty("apiVersion", out var apiVersion)
|
||||
? apiVersion.GetString()
|
||||
: baseRoot.GetPropertyOrNull("apiVersion")?.GetString();
|
||||
|
||||
result["kind"] = overlayRoot.TryGetProperty("kind", out var kind)
|
||||
? kind.GetString()
|
||||
: baseRoot.GetPropertyOrNull("kind")?.GetString();
|
||||
|
||||
result["metadata"] = MergeMetadata(baseRoot.GetPropertyOrNull("metadata"), overlayRoot.GetPropertyOrNull("metadata"));
|
||||
|
||||
var mergedSpec = MergeSpec(baseRoot.GetPropertyOrNull("spec"), overlayRoot.GetPropertyOrNull("spec"));
|
||||
if (mergedSpec is not null)
|
||||
{
|
||||
result["spec"] = mergedSpec;
|
||||
}
|
||||
|
||||
// Preserve any other top-level fields with overlay precedence.
|
||||
CopyUnknownProperties(baseRoot, result, skipNames: new[] { "apiVersion", "kind", "metadata", "spec" });
|
||||
CopyUnknownProperties(overlayRoot, result, skipNames: new[] { "apiVersion", "kind", "metadata", "spec" });
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static JsonObject MergeSpec(JsonElement? baseSpec, JsonElement? overlaySpec)
|
||||
{
|
||||
var spec = new JsonObject();
|
||||
|
||||
if (baseSpec is { ValueKind: JsonValueKind.Object } b)
|
||||
{
|
||||
CopyAllProperties(b, spec);
|
||||
}
|
||||
|
||||
if (overlaySpec is { ValueKind: JsonValueKind.Object } o)
|
||||
{
|
||||
CopyAllProperties(o, spec);
|
||||
}
|
||||
|
||||
// defaultEffect: overlay wins, else base, else schema default "deny".
|
||||
spec["defaultEffect"] = overlaySpec?.GetPropertyOrNull("defaultEffect")?.GetString()
|
||||
?? baseSpec?.GetPropertyOrNull("defaultEffect")?.GetString()
|
||||
?? "deny";
|
||||
|
||||
var mergedStatements = MergeStatements(baseSpec, overlaySpec);
|
||||
spec["statements"] = mergedStatements;
|
||||
|
||||
return spec;
|
||||
}
|
||||
|
||||
private static JsonArray MergeStatements(JsonElement? baseSpec, JsonElement? overlaySpec)
|
||||
{
|
||||
var statements = new Dictionary<string, JsonNode>(StringComparer.Ordinal);
|
||||
|
||||
void AddRange(JsonElement? spec)
|
||||
{
|
||||
if (spec is not { ValueKind: JsonValueKind.Object }) return;
|
||||
if (!spec.Value.TryGetProperty("statements", out var stmts) || stmts.ValueKind != JsonValueKind.Array) return;
|
||||
|
||||
foreach (var statement in stmts.EnumerateArray())
|
||||
{
|
||||
if (statement.ValueKind != JsonValueKind.Object) continue;
|
||||
if (!statement.TryGetProperty("id", out var idProp) || idProp.ValueKind != JsonValueKind.String) continue;
|
||||
var id = idProp.GetString() ?? string.Empty;
|
||||
statements[id] = JsonNode.Parse(statement.GetRawText())!; // replace if already present
|
||||
}
|
||||
}
|
||||
|
||||
AddRange(baseSpec);
|
||||
AddRange(overlaySpec);
|
||||
|
||||
var merged = new JsonArray();
|
||||
foreach (var kvp in statements.OrderBy(k => k.Key, StringComparer.Ordinal))
|
||||
{
|
||||
merged.Add(kvp.Value);
|
||||
}
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
private static JsonObject MergeMetadata(JsonElement? baseMeta, JsonElement? overlayMeta)
|
||||
{
|
||||
var meta = new JsonObject();
|
||||
|
||||
if (baseMeta is { ValueKind: JsonValueKind.Object } b)
|
||||
{
|
||||
CopyAllProperties(b, meta);
|
||||
}
|
||||
|
||||
if (overlayMeta is { ValueKind: JsonValueKind.Object } o)
|
||||
{
|
||||
CopyAllProperties(o, meta);
|
||||
}
|
||||
|
||||
meta["labels"] = MergeStringMap(
|
||||
baseMeta.GetPropertyOrNull("labels"),
|
||||
overlayMeta.GetPropertyOrNull("labels"));
|
||||
|
||||
meta["annotations"] = MergeStringMap(
|
||||
baseMeta.GetPropertyOrNull("annotations"),
|
||||
overlayMeta.GetPropertyOrNull("annotations"));
|
||||
|
||||
return meta;
|
||||
}
|
||||
|
||||
private static JsonObject MergeStringMap(JsonElement? baseMap, JsonElement? overlayMap)
|
||||
{
|
||||
var map = new JsonObject();
|
||||
|
||||
if (baseMap is { ValueKind: JsonValueKind.Object } b)
|
||||
{
|
||||
CopyAllProperties(b, map);
|
||||
}
|
||||
|
||||
if (overlayMap is { ValueKind: JsonValueKind.Object } o)
|
||||
{
|
||||
CopyAllProperties(o, map);
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
private static void CopyAllProperties(JsonElement element, JsonObject target)
|
||||
{
|
||||
foreach (var property in element.EnumerateObject())
|
||||
{
|
||||
target[property.Name] = JsonNode.Parse(property.Value.GetRawText());
|
||||
}
|
||||
}
|
||||
|
||||
private static void CopyUnknownProperties(JsonElement element, JsonObject target, string[] skipNames)
|
||||
{
|
||||
var skip = new HashSet<string>(skipNames, StringComparer.Ordinal);
|
||||
foreach (var property in element.EnumerateObject())
|
||||
{
|
||||
if (skip.Contains(property.Name))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
target[property.Name] = JsonNode.Parse(property.Value.GetRawText());
|
||||
}
|
||||
}
|
||||
|
||||
private static JsonElement? GetPropertyOrNull(this JsonElement? element, string name)
|
||||
{
|
||||
if (element is not { ValueKind: JsonValueKind.Object })
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return element.Value.TryGetProperty(name, out var value) ? value : (JsonElement?)null;
|
||||
}
|
||||
}
|
||||
168
src/Policy/__Libraries/StellaOps.Policy/SplMigrationTool.cs
Normal file
168
src/Policy/__Libraries/StellaOps.Policy/SplMigrationTool.cs
Normal file
@@ -0,0 +1,168 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
/// <summary>
|
||||
/// Converts legacy <see cref="PolicyDocument"/> instances to SPL (Stella Policy Language) JSON packs.
|
||||
/// Output is canonicalised for deterministic hashing and downstream packaging.
|
||||
/// </summary>
|
||||
public static class SplMigrationTool
|
||||
{
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new()
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = null,
|
||||
};
|
||||
|
||||
public static string ToSplPolicyJson(PolicyDocument document)
|
||||
{
|
||||
if (document is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(document));
|
||||
}
|
||||
|
||||
var node = BuildNode(document);
|
||||
var utf8 = Encoding.UTF8.GetBytes(node.ToJsonString(SerializerOptions));
|
||||
var canonical = SplCanonicalizer.CanonicalizeToUtf8(utf8);
|
||||
return Encoding.UTF8.GetString(canonical);
|
||||
}
|
||||
|
||||
private static JsonNode BuildNode(PolicyDocument document)
|
||||
{
|
||||
var root = new JsonObject
|
||||
{
|
||||
["apiVersion"] = "spl.stellaops/v1",
|
||||
["kind"] = "Policy",
|
||||
["metadata"] = BuildMetadata(document.Metadata),
|
||||
["spec"] = BuildSpec(document)
|
||||
};
|
||||
|
||||
return root;
|
||||
}
|
||||
|
||||
private static JsonObject BuildMetadata(ImmutableDictionary<string, string> metadata)
|
||||
{
|
||||
var labels = new JsonObject();
|
||||
foreach (var pair in metadata.OrderBy(static p => p.Key, StringComparer.Ordinal))
|
||||
{
|
||||
labels[pair.Key] = pair.Value;
|
||||
}
|
||||
|
||||
return new JsonObject
|
||||
{
|
||||
["name"] = labels.TryGetPropertyValue("name", out var nameNode) && nameNode is JsonValue ? nameNode : null,
|
||||
["labels"] = labels
|
||||
};
|
||||
}
|
||||
|
||||
private static JsonObject BuildSpec(PolicyDocument document)
|
||||
{
|
||||
var statements = new JsonArray();
|
||||
foreach (var rule in document.Rules.OrderBy(static r => r.Identifier ?? r.Name, StringComparer.Ordinal))
|
||||
{
|
||||
statements.Add(BuildStatement(rule));
|
||||
}
|
||||
|
||||
var spec = new JsonObject
|
||||
{
|
||||
["defaultEffect"] = "deny",
|
||||
["statements"] = statements
|
||||
};
|
||||
|
||||
return spec;
|
||||
}
|
||||
|
||||
private static JsonObject BuildStatement(PolicyRule rule)
|
||||
{
|
||||
var id = rule.Identifier ?? Slug(rule.Name);
|
||||
var effect = MapEffect(rule.Action.Type);
|
||||
|
||||
var statement = new JsonObject
|
||||
{
|
||||
["id"] = id,
|
||||
["effect"] = effect,
|
||||
["match"] = BuildMatch(rule.Match)
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(rule.Description))
|
||||
{
|
||||
statement["description"] = rule.Description;
|
||||
}
|
||||
|
||||
if (rule.Action.Type is PolicyActionType.Warn or PolicyActionType.Defer or PolicyActionType.Ignore)
|
||||
{
|
||||
statement["audit"] = new JsonObject
|
||||
{
|
||||
["message"] = rule.Justification ?? rule.Name,
|
||||
["severity"] = rule.Action.Type == PolicyActionType.Warn ? "warn" : "info"
|
||||
};
|
||||
}
|
||||
|
||||
return statement;
|
||||
}
|
||||
|
||||
private static JsonObject BuildMatch(PolicyRuleMatchCriteria match)
|
||||
{
|
||||
var actions = new JsonArray();
|
||||
var resources = new JsonArray();
|
||||
|
||||
foreach (var pkg in match.Packages)
|
||||
{
|
||||
resources.Add(pkg);
|
||||
actions.Add("use");
|
||||
}
|
||||
|
||||
foreach (var path in match.Paths)
|
||||
{
|
||||
resources.Add(path);
|
||||
actions.Add("access");
|
||||
}
|
||||
|
||||
// Ensure at least one action + resource to satisfy SPL schema.
|
||||
if (resources.Count == 0)
|
||||
{
|
||||
resources.Add("*");
|
||||
actions.Add("read");
|
||||
}
|
||||
|
||||
return new JsonObject
|
||||
{
|
||||
["resource"] = resources[0],
|
||||
["actions"] = actions
|
||||
};
|
||||
}
|
||||
|
||||
private static string MapEffect(PolicyActionType type) => type switch
|
||||
{
|
||||
PolicyActionType.Block => "deny",
|
||||
PolicyActionType.Escalate => "deny",
|
||||
PolicyActionType.RequireVex => "deny",
|
||||
_ => "allow",
|
||||
};
|
||||
|
||||
private static string Slug(string name)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(name))
|
||||
{
|
||||
return "unnamed";
|
||||
}
|
||||
|
||||
var chars = name.ToLowerInvariant()
|
||||
.Select(ch => char.IsLetterOrDigit(ch) ? ch : '-')
|
||||
.ToArray();
|
||||
|
||||
var slug = new string(chars);
|
||||
while (slug.Contains("--", StringComparison.Ordinal))
|
||||
{
|
||||
slug = slug.Replace("--", "-", StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
return slug.Trim('-');
|
||||
}
|
||||
}
|
||||
48
src/Policy/__Libraries/StellaOps.Policy/SplSchemaResource.cs
Normal file
48
src/Policy/__Libraries/StellaOps.Policy/SplSchemaResource.cs
Normal file
@@ -0,0 +1,48 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public static class SplSchemaResource
|
||||
{
|
||||
private const string SchemaResourceName = "StellaOps.Policy.Schemas.spl-schema@1.json";
|
||||
private const string SampleResourceName = "StellaOps.Policy.Schemas.spl-sample@1.json";
|
||||
|
||||
public static Stream OpenSchemaStream()
|
||||
{
|
||||
return OpenResourceStream(SchemaResourceName);
|
||||
}
|
||||
|
||||
public static string ReadSchemaJson()
|
||||
{
|
||||
using var stream = OpenSchemaStream();
|
||||
using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true);
|
||||
return reader.ReadToEnd();
|
||||
}
|
||||
|
||||
public static Stream OpenSampleStream()
|
||||
{
|
||||
return OpenResourceStream(SampleResourceName);
|
||||
}
|
||||
|
||||
public static string ReadSampleJson()
|
||||
{
|
||||
using var stream = OpenSampleStream();
|
||||
using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true);
|
||||
return reader.ReadToEnd();
|
||||
}
|
||||
|
||||
private static Stream OpenResourceStream(string resourceName)
|
||||
{
|
||||
var assembly = Assembly.GetExecutingAssembly();
|
||||
var stream = assembly.GetManifestResourceStream(resourceName);
|
||||
if (stream is null)
|
||||
{
|
||||
throw new InvalidOperationException($"Unable to locate embedded resource '{resourceName}'.");
|
||||
}
|
||||
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
@@ -14,9 +14,11 @@
|
||||
<PackageReference Include="JsonSchema.Net" Version="5.3.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<EmbeddedResource Include="Schemas\policy-schema@1.json" />
|
||||
<EmbeddedResource Include="Schemas\policy-scoring-default.json" />
|
||||
<EmbeddedResource Include="Schemas\policy-scoring-schema@1.json" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
<ItemGroup>
|
||||
<EmbeddedResource Include="Schemas\policy-schema@1.json" />
|
||||
<EmbeddedResource Include="Schemas\policy-scoring-default.json" />
|
||||
<EmbeddedResource Include="Schemas\policy-scoring-schema@1.json" />
|
||||
<EmbeddedResource Include="Schemas\spl-schema@1.json" />
|
||||
<EmbeddedResource Include="Schemas\spl-sample@1.json" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -3,3 +3,8 @@
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| POLICY-EXC-25-001 | DONE (2025-10-27) | Policy Guild, Governance Guild | POLICY-SPL-23-001 | Extend SPL schema/spec to reference exception effects and routing templates; publish updated docs and validation fixtures. | Schema updated with exception references; validation tests cover effect types; docs draft ready. |
|
||||
| POLICY-SPL-23-001 | DONE (2025-11-25) | Policy Guild | — | Define SPL v1 schema + fixtures; embed schema/sample in `StellaOps.Policy` with loader helper. | `spl-schema@1.json` and `spl-sample@1.json` embedded; `SplSchemaResource` exposes schema/sample; sprint 0128 task closed. |
|
||||
| POLICY-SPL-23-002 | DONE (2025-11-26) | Policy Guild | POLICY-SPL-23-001 | Canonicalizer + content hashing for SPL policies. | Order-stable canonicalizer (statements/actions/conditions), SHA-256 digest helper, and unit tests in `SplCanonicalizerTests`. |
|
||||
| POLICY-SPL-23-003 | DONE (2025-11-26) | Policy Guild | POLICY-SPL-23-002 | Layering/override engine + tests. | `SplLayeringEngine` merges base/overlay with deterministic output and metadata merge; covered by `SplLayeringEngineTests`. |
|
||||
| POLICY-SPL-23-004 | DONE (2025-11-26) | Policy Guild, Audit Guild | POLICY-SPL-23-003 | Explanation tree model + persistence hooks. | `PolicyExplanation`/`PolicyExplanationNode` produced from evaluation with structured nodes; persistence ready for follow-on wiring. |
|
||||
| POLICY-SPL-23-005 | DONE (2025-11-26) | Policy Guild, DevEx Guild | POLICY-SPL-23-004 | Migration tool to baseline SPL packs. | `SplMigrationTool` converts PolicyDocument to canonical SPL JSON; covered by `SplMigrationToolTests`. |
|
||||
|
||||
@@ -34,16 +34,20 @@ public sealed class PolicyEvaluationTests
|
||||
source: "community",
|
||||
tags: ImmutableArray.Create("reachability:indirect"));
|
||||
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, config, finding);
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, config, finding, out var explanation);
|
||||
|
||||
Assert.Equal(PolicyVerdictStatus.Blocked, verdict.Status);
|
||||
Assert.Equal(19.5, verdict.Score, 3);
|
||||
|
||||
var inputs = verdict.GetInputs();
|
||||
Assert.Equal(50, inputs["severityWeight"]);
|
||||
Assert.Equal(0.65, inputs["trustWeight"], 3);
|
||||
Assert.Equal(0.6, inputs["reachabilityWeight"], 3);
|
||||
Assert.Equal(19.5, inputs["baseScore"], 3);
|
||||
Assert.Equal(PolicyVerdictStatus.Blocked, verdict.Status);
|
||||
Assert.Equal(19.5, verdict.Score, 3);
|
||||
|
||||
var inputs = verdict.GetInputs();
|
||||
Assert.Equal(50, inputs["severityWeight"]);
|
||||
Assert.Equal(0.65, inputs["trustWeight"], 3);
|
||||
Assert.Equal(0.6, inputs["reachabilityWeight"], 3);
|
||||
Assert.Equal(19.5, inputs["baseScore"], 3);
|
||||
|
||||
Assert.NotNull(explanation);
|
||||
Assert.Equal(PolicyVerdictStatus.Blocked, explanation!.Decision);
|
||||
Assert.Equal("BlockMedium", explanation.RuleName);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -79,17 +83,20 @@ public sealed class PolicyEvaluationTests
|
||||
PolicySeverity.Critical,
|
||||
tags: ImmutableArray.Create("reachability:entrypoint"));
|
||||
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, config, finding);
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, config, finding, out var explanation);
|
||||
|
||||
Assert.Equal(PolicyVerdictStatus.Ignored, verdict.Status);
|
||||
Assert.True(verdict.Quiet);
|
||||
Assert.Equal("QuietIgnore", verdict.QuietedBy);
|
||||
Assert.Equal(10, verdict.Score, 3);
|
||||
|
||||
var inputs = verdict.GetInputs();
|
||||
Assert.Equal(90, inputs["baseScore"], 3);
|
||||
Assert.Equal(config.IgnorePenalty, inputs["ignorePenalty"]);
|
||||
Assert.Equal(config.QuietPenalty, inputs["quietPenalty"]);
|
||||
Assert.Equal(PolicyVerdictStatus.Ignored, verdict.Status);
|
||||
Assert.True(verdict.Quiet);
|
||||
Assert.Equal("QuietIgnore", verdict.QuietedBy);
|
||||
Assert.Equal(10, verdict.Score, 3);
|
||||
|
||||
var inputs = verdict.GetInputs();
|
||||
Assert.Equal(90, inputs["baseScore"], 3);
|
||||
Assert.Equal(config.IgnorePenalty, inputs["ignorePenalty"]);
|
||||
Assert.Equal(config.QuietPenalty, inputs["quietPenalty"]);
|
||||
|
||||
Assert.NotNull(explanation);
|
||||
Assert.Equal(PolicyVerdictStatus.Ignored, explanation!.Decision);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -121,16 +128,19 @@ public sealed class PolicyEvaluationTests
|
||||
PolicySeverity.Unknown,
|
||||
tags: ImmutableArray.Create("reachability:unknown", "unknown-age-days:5"));
|
||||
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, config, finding);
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, config, finding, out var explanation);
|
||||
|
||||
Assert.Equal(PolicyVerdictStatus.Blocked, verdict.Status);
|
||||
Assert.Equal(30, verdict.Score, 3); // 60 * 1 * 0.5
|
||||
Assert.Equal(0.55, verdict.UnknownConfidence ?? 0, 3);
|
||||
Assert.Equal("medium", verdict.ConfidenceBand);
|
||||
Assert.Equal(5, verdict.UnknownAgeDays ?? 0, 3);
|
||||
|
||||
var inputs = verdict.GetInputs();
|
||||
Assert.Equal(0.55, inputs["unknownConfidence"], 3);
|
||||
Assert.Equal(5, inputs["unknownAgeDays"], 3);
|
||||
}
|
||||
}
|
||||
Assert.Equal(PolicyVerdictStatus.Blocked, verdict.Status);
|
||||
Assert.Equal(30, verdict.Score, 3); // 60 * 1 * 0.5
|
||||
Assert.Equal(0.55, verdict.UnknownConfidence ?? 0, 3);
|
||||
Assert.Equal("medium", verdict.ConfidenceBand);
|
||||
Assert.Equal(5, verdict.UnknownAgeDays ?? 0, 3);
|
||||
|
||||
var inputs = verdict.GetInputs();
|
||||
Assert.Equal(0.55, inputs["unknownConfidence"], 3);
|
||||
Assert.Equal(5, inputs["unknownAgeDays"], 3);
|
||||
|
||||
Assert.NotNull(explanation);
|
||||
Assert.Equal(PolicyVerdictStatus.Blocked, explanation!.Decision);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,7 +162,7 @@ rules:
|
||||
Assert.True(snapshot!.Document.Rules[0].Action.Quiet);
|
||||
Assert.Null(snapshot.Document.Rules[0].Action.RequireVex);
|
||||
Assert.Equal(PolicyActionType.Ignore, snapshot.Document.Rules[0].Action.Type);
|
||||
var manualVerdict = PolicyEvaluation.EvaluateFinding(snapshot.Document, snapshot.ScoringConfig, PolicyFinding.Create("finding-quiet", PolicySeverity.Low));
|
||||
var manualVerdict = PolicyEvaluation.EvaluateFinding(snapshot.Document, snapshot.ScoringConfig, PolicyFinding.Create("finding-quiet", PolicySeverity.Low), out _);
|
||||
Assert.Equal(PolicyVerdictStatus.Warned, manualVerdict.Status);
|
||||
|
||||
var service = new PolicyPreviewService(store, NullLogger<PolicyPreviewService>.Instance);
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests;
|
||||
|
||||
public class PolicyValidationCliTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task RunAsync_EmitsCanonicalDigest_OnValidPolicy()
|
||||
{
|
||||
var tmp = Path.GetTempFileName();
|
||||
try
|
||||
{
|
||||
await File.WriteAllTextAsync(tmp, """
|
||||
{
|
||||
"apiVersion": "spl.stellaops/v1",
|
||||
"kind": "Policy",
|
||||
"metadata": { "name": "demo" },
|
||||
"spec": {
|
||||
"defaultEffect": "deny",
|
||||
"statements": [
|
||||
{ "id": "ALLOW", "effect": "allow", "match": { "resource": "*", "actions": ["read"] } }
|
||||
]
|
||||
}
|
||||
}
|
||||
""");
|
||||
|
||||
var options = new PolicyValidationCliOptions
|
||||
{
|
||||
Inputs = new[] { tmp },
|
||||
OutputJson = false,
|
||||
Strict = false,
|
||||
};
|
||||
|
||||
using var output = new StringWriter();
|
||||
using var error = new StringWriter();
|
||||
var cli = new PolicyValidationCli(output, error);
|
||||
|
||||
var exit = await cli.RunAsync(options);
|
||||
|
||||
exit.Should().Be(0);
|
||||
var text = output.ToString();
|
||||
text.Should().Contain("OK");
|
||||
text.Should().Contain("canonical.spl.digest:");
|
||||
}
|
||||
finally
|
||||
{
|
||||
File.Delete(tmp);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,90 @@
|
||||
using StellaOps.Policy;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests;
|
||||
|
||||
public class SplCanonicalizerTests
|
||||
{
|
||||
[Fact]
|
||||
public void Canonicalize_SortsStatementsActionsAndConditions()
|
||||
{
|
||||
const string input = """
|
||||
{
|
||||
"kind": "Policy",
|
||||
"apiVersion": "spl.stellaops/v1",
|
||||
"spec": {
|
||||
"statements": [
|
||||
{
|
||||
"effect": "deny",
|
||||
"id": "B-2",
|
||||
"match": {
|
||||
"resource": "/accounts/*",
|
||||
"actions": ["delete", "read"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "desc",
|
||||
"effect": "allow",
|
||||
"id": "A-1",
|
||||
"match": {
|
||||
"actions": ["write", "read"],
|
||||
"resource": "/accounts/*",
|
||||
"conditions": [
|
||||
{"operator": "gte", "value": 2, "field": "tier"},
|
||||
{"field": "env", "value": "prod", "operator": "eq"}
|
||||
]
|
||||
},
|
||||
"audit": {"severity": "warn", "message": "audit msg"}
|
||||
}
|
||||
],
|
||||
"defaultEffect": "deny"
|
||||
},
|
||||
"metadata": {
|
||||
"labels": {"env": "prod"},
|
||||
"annotations": {"a": "1"},
|
||||
"name": "demo"
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var canonical = SplCanonicalizer.CanonicalizeToString(input);
|
||||
|
||||
const string expected = "{\"apiVersion\":\"spl.stellaops/v1\",\"kind\":\"Policy\",\"metadata\":{\"annotations\":{\"a\":\"1\"},\"labels\":{\"env\":\"prod\"},\"name\":\"demo\"},\"spec\":{\"defaultEffect\":\"deny\",\"statements\":[{\"audit\":{\"message\":\"audit msg\",\"severity\":\"warn\"},\"description\":\"desc\",\"effect\":\"allow\",\"id\":\"A-1\",\"match\":{\"actions\":[\"read\",\"write\"],\"conditions\":[{\"field\":\"env\",\"operator\":\"eq\",\"value\":\"prod\"},{\"field\":\"tier\",\"operator\":\"gte\",\"value\":2}],\"resource\":\"/accounts/*\"}},{\"effect\":\"deny\",\"id\":\"B-2\",\"match\":{\"actions\":[\"delete\",\"read\"],\"resource\":\"/accounts/*\"}}]}}}";
|
||||
|
||||
Assert.Equal(expected, canonical);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeDigest_IgnoresOrderingNoise()
|
||||
{
|
||||
const string versionA = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"name":"demo"},"spec":{"defaultEffect":"deny","statements":[{"id":"B","effect":"deny","match":{"resource":"/r","actions":["write","read"]}},{"id":"A","effect":"allow","match":{"resource":"/r","actions":["read"],"conditions":[{"field":"env","operator":"eq","value":"prod"}]}}]}}
|
||||
""";
|
||||
|
||||
const string versionB = """
|
||||
{"spec":{"statements":[{"match":{"actions":["read"],"resource":"/r","conditions":[{"value":"prod","operator":"eq","field":"env"}]},"effect":"allow","id":"A"},{"match":{"actions":["read","write"],"resource":"/r"},"effect":"deny","id":"B"}],"defaultEffect":"deny"},"kind":"Policy","metadata":{"name":"demo"},"apiVersion":"spl.stellaops/v1"}
|
||||
""";
|
||||
|
||||
var hashA = SplCanonicalizer.ComputeDigest(versionA);
|
||||
var hashB = SplCanonicalizer.ComputeDigest(versionB);
|
||||
|
||||
Assert.Equal(hashA, hashB);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeDigest_DetectsContentChange()
|
||||
{
|
||||
const string baseDoc = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"name":"demo"},"spec":{"statements":[{"id":"A","effect":"allow","match":{"resource":"/r","actions":["read"]}}]}}
|
||||
""";
|
||||
|
||||
const string changedDoc = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"name":"demo"},"spec":{"statements":[{"id":"A","effect":"allow","match":{"resource":"/r","actions":["read","write"]}}]}}
|
||||
""";
|
||||
|
||||
var original = SplCanonicalizer.ComputeDigest(baseDoc);
|
||||
var changed = SplCanonicalizer.ComputeDigest(changedDoc);
|
||||
|
||||
Assert.NotEqual(original, changed);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
using System.Text.Json;
|
||||
using StellaOps.Policy;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests;
|
||||
|
||||
public class SplLayeringEngineTests
|
||||
{
|
||||
[Fact]
|
||||
public void Merge_ReplacesStatementsById_AndKeepsBaseOnes()
|
||||
{
|
||||
const string baseDoc = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"name":"demo"},"spec":{"defaultEffect":"deny","statements":[{"id":"A","effect":"allow","match":{"resource":"/r","actions":["read"]}}, {"id":"B","effect":"deny","match":{"resource":"/r","actions":["write"]}}]}}
|
||||
""";
|
||||
|
||||
const string overlay = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"name":"demo"},"spec":{"statements":[{"id":"A","effect":"deny","match":{"resource":"/r","actions":["read","write"]}}, {"id":"C","effect":"allow","match":{"resource":"/r","actions":["read"]}}]}}
|
||||
""";
|
||||
|
||||
var merged = SplLayeringEngine.Merge(baseDoc, overlay);
|
||||
|
||||
const string expected = "{\"apiVersion\":\"spl.stellaops/v1\",\"kind\":\"Policy\",\"metadata\":{\"name\":\"demo\"},\"spec\":{\"defaultEffect\":\"deny\",\"statements\":[{\"effect\":\"deny\",\"id\":\"A\",\"match\":{\"actions\":[\"read\",\"write\"],\"resource\":\"/r\"}},{\"effect\":\"deny\",\"id\":\"B\",\"match\":{\"actions\":[\"write\"],\"resource\":\"/r\"}},{\"effect\":\"allow\",\"id\":\"C\",\"match\":{\"actions\":[\"read\"],\"resource\":\"/r\"}}]}}";
|
||||
|
||||
Assert.Equal(expected, merged);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_MergesMetadataAndDefaultEffect()
|
||||
{
|
||||
const string baseDoc = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"name":"demo","labels":{"env":"dev"}},"spec":{"defaultEffect":"deny","statements":[{"id":"A","effect":"allow","match":{"resource":"/r","actions":["read"]}}]}}
|
||||
""";
|
||||
|
||||
const string overlay = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"labels":{"env":"prod","tier":"gold"}},"spec":{"defaultEffect":"allow","statements":[{"id":"B","effect":"deny","match":{"resource":"/r","actions":["write"]}}]}}
|
||||
""";
|
||||
|
||||
var merged = SplLayeringEngine.Merge(baseDoc, overlay);
|
||||
|
||||
const string expected = "{\"apiVersion\":\"spl.stellaops/v1\",\"kind\":\"Policy\",\"metadata\":{\"labels\":{\"env\":\"prod\",\"tier\":\"gold\"},\"name\":\"demo\"},\"spec\":{\"defaultEffect\":\"allow\",\"statements\":[{\"effect\":\"allow\",\"id\":\"A\",\"match\":{\"actions\":[\"read\"],\"resource\":\"/r\"}},{\"effect\":\"deny\",\"id\":\"B\",\"match\":{\"actions\":[\"write\"],\"resource\":\"/r\"}}]}}";
|
||||
|
||||
Assert.Equal(expected, merged);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_PreservesUnknownTopLevelAndSpecFields()
|
||||
{
|
||||
const string baseDoc = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"name":"demo"},"extras":{"foo":1},"spec":{"defaultEffect":"deny","statements":[{"id":"A","effect":"allow","match":{"resource":"/r","actions":["read"]}}],"extensions":{"bar":true}}}
|
||||
""";
|
||||
|
||||
const string overlay = """
|
||||
{"apiVersion":"spl.stellaops/v1","kind":"Policy","metadata":{"name":"demo"},"spec":{"statements":[{"id":"B","effect":"deny","match":{"resource":"/r","actions":["write"]}}]}}
|
||||
""";
|
||||
|
||||
var merged = SplLayeringEngine.Merge(baseDoc, overlay);
|
||||
|
||||
using var doc = JsonDocument.Parse(merged);
|
||||
var root = doc.RootElement;
|
||||
|
||||
Assert.True(root.TryGetProperty("extras", out var extras) && extras.TryGetProperty("foo", out var foo) && foo.GetInt32() == 1);
|
||||
Assert.True(root.GetProperty("spec").TryGetProperty("extensions", out var extensions) && extensions.TryGetProperty("bar", out var bar) && bar.GetBoolean());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Policy;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests;
|
||||
|
||||
public class SplMigrationToolTests
|
||||
{
|
||||
[Fact]
|
||||
public void ToSplPolicyJson_ConvertsRulesAndMetadata()
|
||||
{
|
||||
var rule = PolicyRule.Create(
|
||||
name: "Block CVE",
|
||||
action: new PolicyAction(PolicyActionType.Block, null, null, null, false),
|
||||
severities: ImmutableArray.Create(PolicySeverity.Critical),
|
||||
environments: ImmutableArray<string>.Empty,
|
||||
sources: ImmutableArray<string>.Empty,
|
||||
vendors: ImmutableArray<string>.Empty,
|
||||
licenses: ImmutableArray<string>.Empty,
|
||||
tags: ImmutableArray<string>.Empty,
|
||||
match: PolicyRuleMatchCriteria.Create(
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray.Create("/app"),
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty),
|
||||
expires: null,
|
||||
justification: "block it",
|
||||
identifier: "RULE-1");
|
||||
|
||||
var document = new PolicyDocument(
|
||||
PolicySchema.CurrentVersion,
|
||||
ImmutableArray.Create(rule),
|
||||
ImmutableDictionary<string, string>.Empty.Add("name", "demo"),
|
||||
PolicyExceptionConfiguration.Empty);
|
||||
|
||||
var spl = SplMigrationTool.ToSplPolicyJson(document);
|
||||
|
||||
const string expected = "{\"apiVersion\":\"spl.stellaops/v1\",\"kind\":\"Policy\",\"metadata\":{\"labels\":{\"name\":\"demo\"},\"name\":\"demo\"},\"spec\":{\"defaultEffect\":\"deny\",\"statements\":[{\"effect\":\"deny\",\"id\":\"RULE-1\",\"match\":{\"actions\":[\"access\"],\"resource\":\"/app\"}}]}}";
|
||||
|
||||
Assert.Equal(expected, spl);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToSplPolicyJson_UsesOverlaySafeIdsAndAudits()
|
||||
{
|
||||
var rule = PolicyRule.Create(
|
||||
name: "Warn entrypoint",
|
||||
action: new PolicyAction(PolicyActionType.Warn, null, null, null, true),
|
||||
severities: ImmutableArray.Create(PolicySeverity.Low),
|
||||
environments: ImmutableArray<string>.Empty,
|
||||
sources: ImmutableArray<string>.Empty,
|
||||
vendors: ImmutableArray<string>.Empty,
|
||||
licenses: ImmutableArray<string>.Empty,
|
||||
tags: ImmutableArray<string>.Empty,
|
||||
match: PolicyRuleMatchCriteria.Empty,
|
||||
expires: null,
|
||||
justification: "soft warning");
|
||||
|
||||
var document = new PolicyDocument(
|
||||
PolicySchema.CurrentVersion,
|
||||
ImmutableArray.Create(rule),
|
||||
ImmutableDictionary<string, string>.Empty,
|
||||
PolicyExceptionConfiguration.Empty);
|
||||
|
||||
var spl = SplMigrationTool.ToSplPolicyJson(document);
|
||||
|
||||
const string expectedId = "warn-entrypoint";
|
||||
Assert.Contains(expectedId, spl);
|
||||
Assert.Contains("\"audit\":{\"message\":\"soft warning\",\"severity\":\"warn\"}", spl);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,29 @@
|
||||
using System.Text.Json;
|
||||
using StellaOps.Policy;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests;
|
||||
|
||||
public class SplSchemaResourceTests
|
||||
{
|
||||
[Fact]
|
||||
public void Schema_IncludesReachabilityAndExploitability()
|
||||
{
|
||||
var schema = SplSchemaResource.GetSchema();
|
||||
using var doc = JsonDocument.Parse(schema);
|
||||
var match = doc.RootElement
|
||||
.GetProperty("properties")
|
||||
.GetProperty("spec")
|
||||
.GetProperty("properties")
|
||||
.GetProperty("statements")
|
||||
.GetProperty("items")
|
||||
.GetProperty("properties")
|
||||
.GetProperty("match")
|
||||
.GetProperty("properties");
|
||||
|
||||
Assert.True(match.TryGetProperty("reachability", out var reachability));
|
||||
Assert.Equal(JsonValueKind.Object, reachability.ValueKind);
|
||||
Assert.True(match.TryGetProperty("exploitability", out var exploitability));
|
||||
Assert.Equal(JsonValueKind.Object, exploitability.ValueKind);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
namespace StellaOps.Scanner.WebService.Contracts;
|
||||
|
||||
public sealed record ReplayAttachRequest(
|
||||
string ManifestHash,
|
||||
IReadOnlyList<ReplayBundleStatusDto> Bundles);
|
||||
|
||||
public sealed record ReplayAttachResponse(string Status);
|
||||
@@ -7,8 +7,19 @@ public sealed record ScanStatusResponse(
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset UpdatedAt,
|
||||
string? FailureReason,
|
||||
SurfacePointersDto? Surface);
|
||||
SurfacePointersDto? Surface,
|
||||
ReplayStatusDto? Replay);
|
||||
|
||||
public sealed record ScanStatusTarget(
|
||||
string? Reference,
|
||||
string? Digest);
|
||||
|
||||
public sealed record ReplayStatusDto(
|
||||
string ManifestHash,
|
||||
IReadOnlyList<ReplayBundleStatusDto> Bundles);
|
||||
|
||||
public sealed record ReplayBundleStatusDto(
|
||||
string Type,
|
||||
string Digest,
|
||||
string CasUri,
|
||||
long SizeBytes);
|
||||
|
||||
@@ -1,9 +1,20 @@
|
||||
namespace StellaOps.Scanner.WebService.Domain;
|
||||
|
||||
public sealed record ScanSnapshot(
|
||||
ScanId ScanId,
|
||||
ScanTarget Target,
|
||||
ScanStatus Status,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset UpdatedAt,
|
||||
string? FailureReason);
|
||||
public sealed record ScanSnapshot(
|
||||
ScanId ScanId,
|
||||
ScanTarget Target,
|
||||
ScanStatus Status,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset UpdatedAt,
|
||||
string? FailureReason,
|
||||
ReplayArtifacts? Replay);
|
||||
|
||||
public sealed record ReplayArtifacts(
|
||||
string ManifestHash,
|
||||
IReadOnlyList<ReplayBundleSummary> Bundles);
|
||||
|
||||
public sealed record ReplayBundleSummary(
|
||||
string Type,
|
||||
string Digest,
|
||||
string CasUri,
|
||||
long SizeBytes);
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
internal static class ReplayEndpoints
|
||||
{
|
||||
public static void MapReplayEndpoints(this RouteGroupBuilder apiGroup)
|
||||
{
|
||||
var replay = apiGroup.MapGroup("/replay");
|
||||
|
||||
replay.MapPost("/{scanId}/attach", HandleAttachAsync)
|
||||
.WithName("scanner.replay.attach")
|
||||
.Produces<ReplayAttachResponse>(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.Produces(StatusCodes.Status400BadRequest);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleAttachAsync(
|
||||
string scanId,
|
||||
ReplayAttachRequest request,
|
||||
IScanCoordinator coordinator,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (!ScanId.TryParse(scanId, out var parsed))
|
||||
{
|
||||
return Results.BadRequest("invalid scan id");
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.ManifestHash) || request.Bundles is null || request.Bundles.Count == 0)
|
||||
{
|
||||
return Results.BadRequest("manifest hash and bundles are required");
|
||||
}
|
||||
|
||||
var replay = new ReplayArtifacts(
|
||||
request.ManifestHash,
|
||||
request.Bundles
|
||||
.Select(b => new ReplayBundleSummary(b.Type, b.Digest, b.CasUri, b.SizeBytes))
|
||||
.ToList());
|
||||
|
||||
var attached = await coordinator.AttachReplayAsync(parsed, replay, cancellationToken).ConfigureAwait(false);
|
||||
if (!attached)
|
||||
{
|
||||
return Results.NotFound();
|
||||
}
|
||||
|
||||
return Results.Ok(new ReplayAttachResponse("attached"));
|
||||
}
|
||||
}
|
||||
@@ -203,7 +203,8 @@ internal static class ScanEndpoints
|
||||
CreatedAt: snapshot.CreatedAt,
|
||||
UpdatedAt: snapshot.UpdatedAt,
|
||||
FailureReason: snapshot.FailureReason,
|
||||
Surface: surfacePointers);
|
||||
Surface: surfacePointers,
|
||||
Replay: snapshot.Replay is null ? null : MapReplay(snapshot.Replay));
|
||||
|
||||
return Json(response, StatusCodes.Status200OK);
|
||||
}
|
||||
@@ -283,6 +284,15 @@ internal static class ScanEndpoints
|
||||
return Results.Empty;
|
||||
}
|
||||
|
||||
private static ReplayStatusDto MapReplay(ReplayArtifacts replay)
|
||||
{
|
||||
return new ReplayStatusDto(
|
||||
ManifestHash: replay.ManifestHash,
|
||||
Bundles: replay.Bundles
|
||||
.Select(b => new ReplayBundleStatusDto(b.Type, b.Digest, b.CasUri, b.SizeBytes))
|
||||
.ToList());
|
||||
}
|
||||
|
||||
|
||||
private static async Task<IResult> HandleEntryTraceAsync(
|
||||
string scanId,
|
||||
|
||||
@@ -31,9 +31,11 @@ using StellaOps.Scanner.WebService.Hosting;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
using StellaOps.Scanner.WebService.Security;
|
||||
using StellaOps.Scanner.WebService.Replay;
|
||||
using StellaOps.Scanner.Storage;
|
||||
using StellaOps.Scanner.Storage.Extensions;
|
||||
using StellaOps.Scanner.Storage.Mongo;
|
||||
using StellaOps.Scanner.WebService.Endpoints;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
|
||||
var builder = WebApplication.CreateBuilder(args);
|
||||
@@ -83,13 +85,14 @@ builder.Services.AddScannerCache(builder.Configuration);
|
||||
builder.Services.AddSingleton<ServiceStatus>();
|
||||
builder.Services.AddHttpContextAccessor();
|
||||
builder.Services.AddSingleton<ScanProgressStream>();
|
||||
builder.Services.AddSingleton<IScanProgressPublisher>(sp => sp.GetRequiredService<ScanProgressStream>());
|
||||
builder.Services.AddSingleton<IScanProgressReader>(sp => sp.GetRequiredService<ScanProgressStream>());
|
||||
builder.Services.AddSingleton<IScanCoordinator, InMemoryScanCoordinator>();
|
||||
builder.Services.AddSingleton<IPolicySnapshotRepository, InMemoryPolicySnapshotRepository>();
|
||||
builder.Services.AddSingleton<IPolicyAuditRepository, InMemoryPolicyAuditRepository>();
|
||||
builder.Services.AddSingleton<IScanProgressPublisher>(sp => sp.GetRequiredService<ScanProgressStream>());
|
||||
builder.Services.AddSingleton<IScanProgressReader>(sp => sp.GetRequiredService<ScanProgressStream>());
|
||||
builder.Services.AddSingleton<IScanCoordinator, InMemoryScanCoordinator>();
|
||||
builder.Services.AddSingleton<IPolicySnapshotRepository, InMemoryPolicySnapshotRepository>();
|
||||
builder.Services.AddSingleton<IPolicyAuditRepository, InMemoryPolicyAuditRepository>();
|
||||
builder.Services.AddSingleton<PolicySnapshotStore>();
|
||||
builder.Services.AddSingleton<PolicyPreviewService>();
|
||||
builder.Services.AddSingleton<IRecordModeService, RecordModeService>();
|
||||
builder.Services.AddStellaOpsCrypto();
|
||||
builder.Services.AddBouncyCastleEd25519Provider();
|
||||
builder.Services.AddSingleton<IReportSigner, ReportSigner>();
|
||||
@@ -386,6 +389,7 @@ if (app.Environment.IsEnvironment("Testing"))
|
||||
}
|
||||
|
||||
apiGroup.MapScanEndpoints(resolvedOptions.Api.ScansSegment);
|
||||
apiGroup.MapReplayEndpoints();
|
||||
|
||||
if (resolvedOptions.Features.EnablePolicyPreview)
|
||||
{
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Replay.Core;
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Replay;
|
||||
|
||||
internal interface IRecordModeService
|
||||
{
|
||||
Task<(ReplayRunRecord Run, IReadOnlyList<ReplayBundleRecord> Bundles)> BuildAsync(
|
||||
string scanId,
|
||||
ReplayManifest manifest,
|
||||
ReplayBundleWriteResult inputBundle,
|
||||
ReplayBundleWriteResult outputBundle,
|
||||
string sbomDigest,
|
||||
string findingsDigest,
|
||||
string? vexDigest = null,
|
||||
string? logDigest = null,
|
||||
IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null);
|
||||
|
||||
Task<ReplayArtifacts?> AttachAsync(
|
||||
ScanId scanId,
|
||||
ReplayManifest manifest,
|
||||
ReplayBundleWriteResult inputBundle,
|
||||
ReplayBundleWriteResult outputBundle,
|
||||
string sbomDigest,
|
||||
string findingsDigest,
|
||||
IScanCoordinator coordinator,
|
||||
string? vexDigest = null,
|
||||
string? logDigest = null,
|
||||
IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
@@ -0,0 +1,104 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Replay.Core;
|
||||
using StellaOps.Scanner.Core.Replay;
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Replay;
|
||||
|
||||
/// <summary>
|
||||
/// Prepares replay run metadata from WebService scan results. This is a thin façade that will be invoked
|
||||
/// once record-mode wiring lands in the scan pipeline.
|
||||
/// </summary>
|
||||
internal sealed class RecordModeService : IRecordModeService
|
||||
{
|
||||
private readonly RecordModeAssembler _assembler;
|
||||
|
||||
public RecordModeService(TimeProvider? timeProvider = null)
|
||||
{
|
||||
_assembler = new RecordModeAssembler(timeProvider);
|
||||
}
|
||||
|
||||
public Task<(ReplayRunRecord Run, IReadOnlyList<ReplayBundleRecord> Bundles)> BuildAsync(
|
||||
string scanId,
|
||||
ReplayManifest manifest,
|
||||
ReplayBundleWriteResult inputBundle,
|
||||
ReplayBundleWriteResult outputBundle,
|
||||
string sbomDigest,
|
||||
string findingsDigest,
|
||||
string? vexDigest = null,
|
||||
string? logDigest = null,
|
||||
IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(manifest);
|
||||
|
||||
var run = _assembler.BuildRun(scanId, manifest, sbomDigest, findingsDigest, vexDigest, logDigest);
|
||||
var bundles = _assembler.BuildBundles(inputBundle, outputBundle, additionalBundles);
|
||||
|
||||
return Task.FromResult((run, bundles));
|
||||
}
|
||||
|
||||
public async Task<ReplayArtifacts?> AttachAsync(
|
||||
ScanId scanId,
|
||||
ReplayManifest manifest,
|
||||
ReplayBundleWriteResult inputBundle,
|
||||
ReplayBundleWriteResult outputBundle,
|
||||
string sbomDigest,
|
||||
string findingsDigest,
|
||||
IScanCoordinator coordinator,
|
||||
string? vexDigest = null,
|
||||
string? logDigest = null,
|
||||
IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(coordinator);
|
||||
|
||||
var (run, bundles) = await BuildAsync(
|
||||
scanId.Value,
|
||||
manifest,
|
||||
inputBundle,
|
||||
outputBundle,
|
||||
sbomDigest,
|
||||
findingsDigest,
|
||||
vexDigest,
|
||||
logDigest,
|
||||
additionalBundles).ConfigureAwait(false);
|
||||
|
||||
var replay = BuildArtifacts(run.ManifestHash, bundles);
|
||||
var attached = await coordinator.AttachReplayAsync(scanId, replay, cancellationToken).ConfigureAwait(false);
|
||||
return attached ? replay : null;
|
||||
}
|
||||
|
||||
private static ReplayArtifacts BuildArtifacts(string manifestHash, IReadOnlyList<ReplayBundleRecord> bundles)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(manifestHash);
|
||||
ArgumentNullException.ThrowIfNull(bundles);
|
||||
|
||||
var summaries = bundles
|
||||
.Select(bundle => new ReplayBundleSummary(
|
||||
bundle.Type,
|
||||
NormalizeDigest(bundle.Id),
|
||||
bundle.Location,
|
||||
bundle.Size))
|
||||
.ToList();
|
||||
|
||||
return new ReplayArtifacts(manifestHash, summaries);
|
||||
}
|
||||
|
||||
private static string NormalizeDigest(string digest)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(digest))
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
var trimmed = digest.Trim().ToLowerInvariant();
|
||||
return trimmed.StartsWith("sha256:", StringComparison.Ordinal)
|
||||
? trimmed
|
||||
: $"sha256:{trimmed}";
|
||||
}
|
||||
}
|
||||
@@ -9,4 +9,6 @@ public interface IScanCoordinator
|
||||
ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken);
|
||||
|
||||
ValueTask<ScanSnapshot?> TryFindByTargetAsync(string? reference, string? digest, CancellationToken cancellationToken);
|
||||
|
||||
ValueTask<bool> AttachReplayAsync(ScanId scanId, ReplayArtifacts replay, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
@@ -46,8 +46,9 @@ public sealed class InMemoryScanCoordinator : IScanCoordinator
|
||||
normalizedTarget,
|
||||
ScanStatus.Pending,
|
||||
now,
|
||||
now,
|
||||
null)),
|
||||
now,
|
||||
null,
|
||||
null)),
|
||||
(_, existing) =>
|
||||
{
|
||||
if (submission.Force)
|
||||
@@ -72,8 +73,8 @@ public sealed class InMemoryScanCoordinator : IScanCoordinator
|
||||
return ValueTask.FromResult(new ScanSubmissionResult(entry.Snapshot, created));
|
||||
}
|
||||
|
||||
public ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken)
|
||||
{
|
||||
public ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken)
|
||||
{
|
||||
if (scans.TryGetValue(scanId.Value, out var entry))
|
||||
{
|
||||
return ValueTask.FromResult<ScanSnapshot?>(entry.Snapshot);
|
||||
@@ -109,6 +110,30 @@ public sealed class InMemoryScanCoordinator : IScanCoordinator
|
||||
return ValueTask.FromResult<ScanSnapshot?>(null);
|
||||
}
|
||||
|
||||
public ValueTask<bool> AttachReplayAsync(ScanId scanId, ReplayArtifacts replay, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(replay);
|
||||
|
||||
if (!scans.TryGetValue(scanId.Value, out var existing))
|
||||
{
|
||||
return ValueTask.FromResult(false);
|
||||
}
|
||||
|
||||
var updated = existing.Snapshot with
|
||||
{
|
||||
Replay = replay,
|
||||
UpdatedAt = timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
scans[scanId.Value] = new ScanEntry(updated);
|
||||
progressPublisher.Publish(scanId, updated.Status.ToString(), "replay-attached", new Dictionary<string, object?>
|
||||
{
|
||||
["replay.manifest"] = replay.ManifestHash,
|
||||
["replay.bundleCount"] = replay.Bundles.Count
|
||||
});
|
||||
return ValueTask.FromResult(true);
|
||||
}
|
||||
|
||||
private void IndexTarget(string scanId, ScanTarget target)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(target.Digest))
|
||||
|
||||
@@ -33,6 +33,8 @@
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
|
||||
<ProjectReference Include="../../Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -0,0 +1,141 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Core.Entropy;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Worker.Utilities;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Processing.Entropy;
|
||||
|
||||
/// <summary>
|
||||
/// Computes entropy reports for executable/blobs and stores them in the analysis store
|
||||
/// for downstream evidence emission.
|
||||
/// </summary>
|
||||
public sealed class EntropyStageExecutor : IScanStageExecutor
|
||||
{
|
||||
private readonly ILogger<EntropyStageExecutor> _logger;
|
||||
private readonly EntropyReportBuilder _reportBuilder;
|
||||
|
||||
public EntropyStageExecutor(ILogger<EntropyStageExecutor> logger)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_reportBuilder = new EntropyReportBuilder();
|
||||
}
|
||||
|
||||
public string StageName => ScanStageNames.EmitReports;
|
||||
|
||||
public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(context);
|
||||
|
||||
// Expect analyzer stage to have populated filesystem snapshots.
|
||||
if (!context.Analysis.TryGet<IReadOnlyList<ScanFileEntry>>(ScanAnalysisKeys.FileEntries, out var files) || files is null)
|
||||
{
|
||||
_logger.LogDebug("No file entries available; skipping entropy analysis.");
|
||||
return;
|
||||
}
|
||||
|
||||
var reports = new List<EntropyFileReport>();
|
||||
foreach (var file in files)
|
||||
{
|
||||
if (!ShouldAnalyze(file))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
try
|
||||
{
|
||||
var data = await ReadFileAsync(file.Path, cancellationToken).ConfigureAwait(false);
|
||||
var flags = DeriveFlags(file);
|
||||
var report = _reportBuilder.BuildFile(file.Path, data, flags);
|
||||
reports.Add(report);
|
||||
}
|
||||
catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
|
||||
{
|
||||
_logger.LogDebug(ex, "Skipping entropy for {Path}: {Reason}", file.Path, ex.Message);
|
||||
}
|
||||
}
|
||||
|
||||
if (reports.Count == 0)
|
||||
{
|
||||
_logger.LogDebug("Entropy analysis produced no reports.");
|
||||
return;
|
||||
}
|
||||
|
||||
var layerDigest = context.Lease.LayerDigest ?? string.Empty;
|
||||
var layerSize = files.Sum(f => f.SizeBytes);
|
||||
var imageOpaqueBytes = reports.Sum(r => r.OpaqueBytes);
|
||||
var imageTotalBytes = files.Sum(f => f.SizeBytes);
|
||||
|
||||
var (summary, imageRatio) = _reportBuilder.BuildLayerSummary(
|
||||
layerDigest,
|
||||
reports,
|
||||
layerSize,
|
||||
imageOpaqueBytes,
|
||||
imageTotalBytes);
|
||||
|
||||
var entropyReport = new EntropyReport(
|
||||
ImageDigest: context.Lease.ImageDigest ?? string.Empty,
|
||||
LayerDigest: layerDigest,
|
||||
Files: reports,
|
||||
ImageOpaqueRatio: imageRatio);
|
||||
|
||||
context.Analysis.Set(ScanAnalysisKeys.EntropyReport, entropyReport);
|
||||
context.Analysis.Set(ScanAnalysisKeys.EntropyLayerSummary, summary);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Entropy report captured for layer {Layer}: opaqueBytes={OpaqueBytes} ratio={Ratio:F2}",
|
||||
layerDigest,
|
||||
summary.OpaqueBytes,
|
||||
summary.OpaqueRatio);
|
||||
}
|
||||
|
||||
private static bool ShouldAnalyze(ScanFileEntry file)
|
||||
{
|
||||
if (file is null || file.SizeBytes < 16 * 1024)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return file.Kind switch
|
||||
{
|
||||
"elf" => true,
|
||||
"pe" => true,
|
||||
"mach-o" => true,
|
||||
"blob" => true,
|
||||
_ => false
|
||||
};
|
||||
}
|
||||
|
||||
private static IEnumerable<string> DeriveFlags(ScanFileEntry file)
|
||||
{
|
||||
if (file?.Metadata is null)
|
||||
{
|
||||
yield break;
|
||||
}
|
||||
|
||||
if (file.Metadata.TryGetValue("stripped", out var stripped) && stripped == "true")
|
||||
{
|
||||
yield return "stripped";
|
||||
}
|
||||
|
||||
if (file.Metadata.TryGetValue("packer", out var packer) && !string.IsNullOrWhiteSpace(packer))
|
||||
{
|
||||
yield return $"packer:{packer}";
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<byte[]> ReadFileAsync(string path, CancellationToken cancellationToken)
|
||||
{
|
||||
await using var stream = File.OpenRead(path);
|
||||
using var buffer = new MemoryStream();
|
||||
await stream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
|
||||
return buffer.ToArray();
|
||||
}
|
||||
}
|
||||
@@ -57,9 +57,9 @@ public sealed class ScanJobProcessor
|
||||
|
||||
foreach (var stage in ScanStageNames.Ordered)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
if (!_executors.TryGetValue(stage, out var executor))
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
if (!_executors.TryGetValue(stage, out var executor))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -5,19 +5,21 @@ namespace StellaOps.Scanner.Worker.Processing;
|
||||
public static class ScanStageNames
|
||||
{
|
||||
public const string ResolveImage = "resolve-image";
|
||||
public const string PullLayers = "pull-layers";
|
||||
public const string BuildFilesystem = "build-filesystem";
|
||||
public const string ExecuteAnalyzers = "execute-analyzers";
|
||||
public const string ComposeArtifacts = "compose-artifacts";
|
||||
public const string EmitReports = "emit-reports";
|
||||
|
||||
public static readonly IReadOnlyList<string> Ordered = new[]
|
||||
{
|
||||
ResolveImage,
|
||||
PullLayers,
|
||||
BuildFilesystem,
|
||||
ExecuteAnalyzers,
|
||||
ComposeArtifacts,
|
||||
EmitReports,
|
||||
};
|
||||
}
|
||||
public const string PullLayers = "pull-layers";
|
||||
public const string BuildFilesystem = "build-filesystem";
|
||||
public const string ExecuteAnalyzers = "execute-analyzers";
|
||||
public const string ComposeArtifacts = "compose-artifacts";
|
||||
public const string EmitReports = "emit-reports";
|
||||
public const string Entropy = "entropy";
|
||||
|
||||
public static readonly IReadOnlyList<string> Ordered = new[]
|
||||
{
|
||||
ResolveImage,
|
||||
PullLayers,
|
||||
BuildFilesystem,
|
||||
ExecuteAnalyzers,
|
||||
ComposeArtifacts,
|
||||
Entropy,
|
||||
EmitReports,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -85,6 +85,7 @@ builder.Services.AddSingleton<IScanStageExecutor, RegistrySecretStageExecutor>()
|
||||
builder.Services.AddSingleton<IScanStageExecutor, AnalyzerStageExecutor>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, Reachability.ReachabilityBuildStageExecutor>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, Reachability.ReachabilityPublishStageExecutor>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, Entropy.EntropyStageExecutor>();
|
||||
|
||||
builder.Services.AddSingleton<ScannerWorkerHostedService>();
|
||||
builder.Services.AddHostedService(sp => sp.GetRequiredService<ScannerWorkerHostedService>());
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Entropy;
|
||||
|
||||
/// <summary>
|
||||
/// Computes sliding-window Shannon entropy for byte buffers.
|
||||
/// Offline-friendly and deterministic: no allocations beyond histogram buffer and result list.
|
||||
/// </summary>
|
||||
public static class EntropyCalculator
|
||||
{
|
||||
/// <summary>
|
||||
/// Computes entropy windows over the supplied buffer.
|
||||
/// </summary>
|
||||
/// <param name="data">Input bytes.</param>
|
||||
/// <param name="windowSize">Window length in bytes (default 4096).</param>
|
||||
/// <param name="stride">Step between windows in bytes (default 1024).</param>
|
||||
/// <returns>List of entropy windows (offset, length, entropy bits/byte).</returns>
|
||||
public static IReadOnlyList<EntropyWindow> Compute(ReadOnlySpan<byte> data, int windowSize = 4096, int stride = 1024)
|
||||
{
|
||||
if (windowSize <= 0)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(windowSize), "Window size must be positive.");
|
||||
}
|
||||
|
||||
if (stride <= 0)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(stride), "Stride must be positive.");
|
||||
}
|
||||
|
||||
var results = new List<EntropyWindow>();
|
||||
if (data.IsEmpty || data.Length < windowSize)
|
||||
{
|
||||
return results;
|
||||
}
|
||||
|
||||
// Reuse histogram buffer; fixed length for byte values.
|
||||
Span<int> histogram = stackalloc int[256];
|
||||
var end = data.Length - windowSize;
|
||||
|
||||
// Seed histogram for first window.
|
||||
for (var i = 0; i < windowSize; i++)
|
||||
{
|
||||
histogram[data[i]]++;
|
||||
}
|
||||
|
||||
AppendEntropy(results, 0, windowSize, histogram, windowSize);
|
||||
|
||||
// Slide window with rolling histogram updates to avoid re-scanning the buffer.
|
||||
for (var offset = stride; offset <= end; offset += stride)
|
||||
{
|
||||
var removeStart = offset - stride;
|
||||
var removeEnd = removeStart + stride;
|
||||
for (var i = removeStart; i < removeEnd; i++)
|
||||
{
|
||||
histogram[data[i]]--;
|
||||
}
|
||||
|
||||
var addStart = offset + windowSize - stride;
|
||||
var addEnd = offset + windowSize;
|
||||
for (var i = addStart; i < addEnd; i++)
|
||||
{
|
||||
histogram[data[i]]++;
|
||||
}
|
||||
|
||||
AppendEntropy(results, offset, windowSize, histogram, windowSize);
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
private static void AppendEntropy(ICollection<EntropyWindow> results, int offset, int length, ReadOnlySpan<int> histogram, int totalCount)
|
||||
{
|
||||
double entropy = 0;
|
||||
for (var i = 0; i < 256; i++)
|
||||
{
|
||||
var count = histogram[i];
|
||||
if (count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var p = (double)count / totalCount;
|
||||
entropy -= p * Math.Log(p, 2);
|
||||
}
|
||||
|
||||
results.Add(new EntropyWindow(offset, length, entropy));
|
||||
}
|
||||
}
|
||||
|
||||
public readonly record struct EntropyWindow(int Offset, int Length, double Entropy);
|
||||
@@ -0,0 +1,107 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Entropy;
|
||||
|
||||
/// <summary>
|
||||
/// Builds per-file entropy reports and aggregates layer-level opaque ratios.
|
||||
/// Keeps logic deterministic and offline-friendly.
|
||||
/// </summary>
|
||||
public sealed class EntropyReportBuilder
|
||||
{
|
||||
private readonly int _windowSize;
|
||||
private readonly int _stride;
|
||||
private readonly double _opaqueThreshold;
|
||||
private readonly double _opaqueFileRatioFlag;
|
||||
|
||||
public EntropyReportBuilder(
|
||||
int windowSize = 4096,
|
||||
int stride = 1024,
|
||||
double opaqueThreshold = 7.2,
|
||||
double opaqueFileRatioFlag = 0.30)
|
||||
{
|
||||
if (windowSize <= 0) throw new ArgumentOutOfRangeException(nameof(windowSize));
|
||||
if (stride <= 0) throw new ArgumentOutOfRangeException(nameof(stride));
|
||||
if (opaqueThreshold <= 0) throw new ArgumentOutOfRangeException(nameof(opaqueThreshold));
|
||||
if (opaqueFileRatioFlag < 0 || opaqueFileRatioFlag > 1) throw new ArgumentOutOfRangeException(nameof(opaqueFileRatioFlag));
|
||||
|
||||
_windowSize = windowSize;
|
||||
_stride = stride;
|
||||
_opaqueThreshold = opaqueThreshold;
|
||||
_opaqueFileRatioFlag = opaqueFileRatioFlag;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Builds a file-level entropy report.
|
||||
/// </summary>
|
||||
public EntropyFileReport BuildFile(string path, ReadOnlySpan<byte> data, IEnumerable<string>? flags = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(path);
|
||||
|
||||
var windows = EntropyCalculator
|
||||
.Compute(data, _windowSize, _stride)
|
||||
.Select(w => new EntropyFileWindow(w.Offset, w.Length, w.Entropy))
|
||||
.ToList();
|
||||
|
||||
var opaqueBytes = windows
|
||||
.Where(w => w.Entropy >= _opaqueThreshold)
|
||||
.Sum(w => (long)w.Length);
|
||||
|
||||
var size = data.Length;
|
||||
var ratio = size == 0 ? 0d : (double)opaqueBytes / size;
|
||||
|
||||
var fileFlags = new List<string>();
|
||||
if (flags is not null)
|
||||
{
|
||||
fileFlags.AddRange(flags.Where(f => !string.IsNullOrWhiteSpace(f)).Select(f => f.Trim()));
|
||||
}
|
||||
|
||||
if (ratio >= _opaqueFileRatioFlag)
|
||||
{
|
||||
fileFlags.Add("opaque-high");
|
||||
}
|
||||
|
||||
return new EntropyFileReport(
|
||||
Path: path,
|
||||
Size: size,
|
||||
OpaqueBytes: opaqueBytes,
|
||||
OpaqueRatio: ratio,
|
||||
Flags: fileFlags,
|
||||
Windows: windows);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Aggregates layer-level opaque ratios and returns an image-level ratio.
|
||||
/// </summary>
|
||||
public (EntropyLayerSummary Layer, double ImageOpaqueRatio) BuildLayerSummary(
|
||||
string layerDigest,
|
||||
IEnumerable<EntropyFileReport> fileReports,
|
||||
long layerTotalBytes,
|
||||
double imageOpaqueBytes,
|
||||
double imageTotalBytes)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(fileReports);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(layerDigest);
|
||||
|
||||
var files = fileReports.ToList();
|
||||
var opaqueBytes = files.Sum(f => f.OpaqueBytes);
|
||||
var indicators = new List<string>();
|
||||
if (files.Any(f => f.Flags.Contains("opaque-high", StringComparer.OrdinalIgnoreCase)))
|
||||
{
|
||||
indicators.Add("packed-like");
|
||||
}
|
||||
|
||||
var layerRatio = layerTotalBytes <= 0 ? 0d : (double)opaqueBytes / layerTotalBytes;
|
||||
var imageRatio = imageTotalBytes <= 0 ? 0d : imageOpaqueBytes / imageTotalBytes;
|
||||
|
||||
var summary = new EntropyLayerSummary(
|
||||
LayerDigest: layerDigest,
|
||||
OpaqueBytes: opaqueBytes,
|
||||
TotalBytes: layerTotalBytes,
|
||||
OpaqueRatio: layerRatio,
|
||||
Indicators: indicators);
|
||||
|
||||
return (summary, imageRatio);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Entropy;
|
||||
|
||||
public sealed record EntropyFileWindow(int Offset, int Length, double EntropyBits);
|
||||
|
||||
public sealed record EntropyFileReport(
|
||||
string Path,
|
||||
long Size,
|
||||
long OpaqueBytes,
|
||||
double OpaqueRatio,
|
||||
IReadOnlyList<string> Flags,
|
||||
IReadOnlyList<EntropyFileWindow> Windows);
|
||||
|
||||
public sealed record EntropyLayerSummary(
|
||||
string LayerDigest,
|
||||
long OpaqueBytes,
|
||||
long TotalBytes,
|
||||
double OpaqueRatio,
|
||||
IReadOnlyList<string> Indicators);
|
||||
|
||||
public sealed record EntropyReport(
|
||||
string ImageDigest,
|
||||
string LayerDigest,
|
||||
IReadOnlyList<EntropyFileReport> Files,
|
||||
double ImageOpaqueRatio);
|
||||
@@ -0,0 +1,98 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Replay.Core;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Replay;
|
||||
|
||||
/// <summary>
|
||||
/// Assembles replay run metadata and bundle records from scanner artifacts.
|
||||
/// </summary>
|
||||
public sealed class RecordModeAssembler
|
||||
{
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public RecordModeAssembler(TimeProvider? timeProvider = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public ReplayRunRecord BuildRun(
|
||||
string scanId,
|
||||
ReplayManifest manifest,
|
||||
string sbomDigest,
|
||||
string findingsDigest,
|
||||
string? vexDigest = null,
|
||||
string? logDigest = null)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
|
||||
ArgumentNullException.ThrowIfNull(manifest);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(sbomDigest);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(findingsDigest);
|
||||
|
||||
var now = _timeProvider.GetUtcNow().UtcDateTime;
|
||||
var manifestHash = "sha256:" + manifest.ComputeCanonicalSha256();
|
||||
|
||||
return new ReplayRunRecord
|
||||
{
|
||||
Id = scanId,
|
||||
ManifestHash = manifestHash,
|
||||
Status = "pending",
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now,
|
||||
Outputs = new ReplayRunOutputs
|
||||
{
|
||||
Sbom = NormalizeDigest(sbomDigest),
|
||||
Findings = NormalizeDigest(findingsDigest),
|
||||
Vex = NormalizeOptionalDigest(vexDigest),
|
||||
Log = NormalizeOptionalDigest(logDigest)
|
||||
},
|
||||
Signatures = new List<ReplaySignatureRecord>()
|
||||
};
|
||||
}
|
||||
|
||||
public IReadOnlyList<ReplayBundleRecord> BuildBundles(
|
||||
ReplayBundleWriteResult inputBundle,
|
||||
ReplayBundleWriteResult outputBundle,
|
||||
IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null)
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow().UtcDateTime;
|
||||
|
||||
var records = new List<ReplayBundleRecord>
|
||||
{
|
||||
ToBundleRecord(inputBundle, "input", now),
|
||||
ToBundleRecord(outputBundle, "output", now)
|
||||
};
|
||||
|
||||
if (additionalBundles != null)
|
||||
{
|
||||
records.AddRange(additionalBundles.Select(b => ToBundleRecord(b.Result, b.Type, now)));
|
||||
}
|
||||
|
||||
return records;
|
||||
}
|
||||
|
||||
private static ReplayBundleRecord ToBundleRecord(ReplayBundleWriteResult result, string type, DateTime createdAt)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(result);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(type);
|
||||
|
||||
return new ReplayBundleRecord
|
||||
{
|
||||
Id = result.ZstSha256,
|
||||
Type = type.Trim().ToLowerInvariant(),
|
||||
Size = result.ZstBytes,
|
||||
Location = result.CasUri,
|
||||
CreatedAt = createdAt
|
||||
};
|
||||
}
|
||||
|
||||
private static string NormalizeDigest(string digest)
|
||||
{
|
||||
var trimmed = digest.Trim().ToLowerInvariant();
|
||||
return trimmed.StartsWith("sha256:", StringComparison.Ordinal) ? trimmed : $"sha256:{trimmed}";
|
||||
}
|
||||
|
||||
private static string? NormalizeOptionalDigest(string? digest)
|
||||
=> string.IsNullOrWhiteSpace(digest) ? null : NormalizeDigest(digest);
|
||||
}
|
||||
@@ -14,5 +14,6 @@
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
</Project>
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
using System;
|
||||
using System.Linq;
|
||||
using StellaOps.Scanner.Core.Entropy;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Tests.Entropy;
|
||||
|
||||
public class EntropyCalculatorTests
|
||||
{
|
||||
[Fact]
|
||||
public void Compute_ReturnsEmpty_WhenBufferTooSmall()
|
||||
{
|
||||
var result = EntropyCalculator.Compute(new byte[10], windowSize: 32, stride: 8);
|
||||
Assert.Empty(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Compute_ProducesZeroEntropy_ForConstantData()
|
||||
{
|
||||
var data = Enumerable.Repeat((byte)0xAA, 4096 * 2).ToArray();
|
||||
|
||||
var windows = EntropyCalculator.Compute(data, windowSize: 4096, stride: 1024);
|
||||
|
||||
Assert.NotEmpty(windows);
|
||||
Assert.All(windows, w => Assert.InRange(w.Entropy, 0, 0.0001));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Compute_DetectsHighEntropy_ForRandomBytes()
|
||||
{
|
||||
var rng = new Random(1234);
|
||||
var data = new byte[8192];
|
||||
rng.NextBytes(data);
|
||||
|
||||
var windows = EntropyCalculator.Compute(data, windowSize: 4096, stride: 1024);
|
||||
|
||||
Assert.NotEmpty(windows);
|
||||
Assert.All(windows, w => Assert.InRange(w.Entropy, 7.0, 8.1));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,53 @@
|
||||
using System;
|
||||
using System.Linq;
|
||||
using StellaOps.Scanner.Core.Entropy;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Tests.Entropy;
|
||||
|
||||
public class EntropyReportBuilderTests
|
||||
{
|
||||
[Fact]
|
||||
public void BuildFile_FlagsOpaqueHigh_WhenRatioExceedsThreshold()
|
||||
{
|
||||
var builder = new EntropyReportBuilder(windowSize: 4, stride: 4, opaqueThreshold: 1.0, opaqueFileRatioFlag: 0.25);
|
||||
// Alternating bytes produce high entropy in every window.
|
||||
var data = Enumerable.Range(0, 64).Select(i => (byte)(i % 2)).ToArray();
|
||||
|
||||
var report = builder.BuildFile("/bin/demo", data);
|
||||
|
||||
Assert.Contains("opaque-high", report.Flags);
|
||||
Assert.True(report.OpaqueRatio > 0.25);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildFile_RespectsProvidedFlags()
|
||||
{
|
||||
var builder = new EntropyReportBuilder(windowSize: 8, stride: 8, opaqueThreshold: 7.0, opaqueFileRatioFlag: 0.90);
|
||||
var data = new byte[64];
|
||||
|
||||
var report = builder.BuildFile("/bin/zero", data, new[] { "stripped", "", "debug-missing" });
|
||||
|
||||
Assert.Contains("stripped", report.Flags);
|
||||
Assert.Contains("debug-missing", report.Flags);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildLayerSummary_ComputesRatios()
|
||||
{
|
||||
var builder = new EntropyReportBuilder(windowSize: 4, stride: 4, opaqueThreshold: 1.0, opaqueFileRatioFlag: 0.25);
|
||||
var data = Enumerable.Range(0, 64).Select(i => (byte)(i % 2)).ToArray();
|
||||
var file = builder.BuildFile("/bin/demo", data);
|
||||
|
||||
var (summary, imageRatio) = builder.BuildLayerSummary(
|
||||
"sha256:layer",
|
||||
new[] { file },
|
||||
layerTotalBytes: 64,
|
||||
imageOpaqueBytes: file.OpaqueBytes,
|
||||
imageTotalBytes: 128);
|
||||
|
||||
Assert.Equal("sha256:layer", summary.LayerDigest);
|
||||
Assert.InRange(summary.OpaqueRatio, 0.25, 1.0);
|
||||
Assert.InRange(imageRatio, 0.0, 1.0);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
using System;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Replay.Core;
|
||||
using StellaOps.Scanner.Core.Replay;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Tests.Replay;
|
||||
|
||||
public sealed class RecordModeAssemblerTests
|
||||
{
|
||||
[Fact]
|
||||
public void BuildRun_ComputesManifestHashAndOutputs()
|
||||
{
|
||||
var manifest = new ReplayManifest
|
||||
{
|
||||
Scan = new ReplayScanMetadata { Id = "scan-1", Time = DateTimeOffset.UnixEpoch }
|
||||
};
|
||||
|
||||
var assembler = new RecordModeAssembler(new FixedTimeProvider(new DateTimeOffset(2025, 11, 25, 12, 0, 0, TimeSpan.Zero)));
|
||||
|
||||
var run = assembler.BuildRun("scan-1", manifest, "sha256:sbom", "findings-digest", vexDigest: "sha256:vex");
|
||||
|
||||
run.Id.Should().Be("scan-1");
|
||||
run.ManifestHash.Should().StartWith("sha256:");
|
||||
run.CreatedAt.Should().Be(new DateTime(2025, 11, 25, 12, 0, 0, DateTimeKind.Utc));
|
||||
run.Outputs.Sbom.Should().Be("sha256:sbom");
|
||||
run.Outputs.Findings.Should().Be("sha256:findings-digest");
|
||||
run.Outputs.Vex.Should().Be("sha256:vex");
|
||||
run.Status.Should().Be("pending");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildBundles_ProducesDeterministicRecords()
|
||||
{
|
||||
var assembler = new RecordModeAssembler(new FixedTimeProvider(DateTimeOffset.UnixEpoch));
|
||||
|
||||
var input = new ReplayBundleWriteResult("tar1", "z1", 10, 20, "cas://replay/zz/z1.tar.zst");
|
||||
var output = new ReplayBundleWriteResult("tar2", "z2", 30, 40, "cas://replay/aa/z2.tar.zst");
|
||||
|
||||
var bundles = assembler.BuildBundles(input, output);
|
||||
|
||||
bundles.Should().HaveCount(2);
|
||||
bundles[0].Id.Should().Be("z1");
|
||||
bundles[0].Type.Should().Be("input");
|
||||
bundles[1].Id.Should().Be("z2");
|
||||
bundles[1].Location.Should().Be("cas://replay/aa/z2.tar.zst");
|
||||
bundles[0].CreatedAt.Should().Be(DateTime.UnixEpoch);
|
||||
}
|
||||
|
||||
private sealed class FixedTimeProvider : TimeProvider
|
||||
{
|
||||
private readonly DateTimeOffset _utc;
|
||||
public FixedTimeProvider(DateTimeOffset utc) => _utc = utc;
|
||||
public override DateTimeOffset GetUtcNow() => _utc;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Net.Http.Json;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Replay.Core;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
using StellaOps.Scanner.WebService.Replay;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests;
|
||||
|
||||
public sealed partial class ScansEndpointsTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task RecordModeService_AttachesReplayAndSurfacedInStatus()
|
||||
{
|
||||
using var secrets = new TestSurfaceSecretsScope();
|
||||
using var factory = new ScannerApplicationFactory(cfg =>
|
||||
{
|
||||
cfg["scanner:authority:enabled"] = "false";
|
||||
});
|
||||
using var client = factory.CreateClient();
|
||||
|
||||
var submitResponse = await client.PostAsJsonAsync("/api/v1/scans", new
|
||||
{
|
||||
image = new { digest = "sha256:demo" }
|
||||
});
|
||||
submitResponse.EnsureSuccessStatusCode();
|
||||
|
||||
var submitPayload = await submitResponse.Content.ReadFromJsonAsync<ScanSubmitResponse>();
|
||||
Assert.NotNull(submitPayload);
|
||||
var scanId = submitPayload!.ScanId;
|
||||
|
||||
using var scope = factory.Services.CreateScope();
|
||||
var coordinator = scope.ServiceProvider.GetRequiredService<IScanCoordinator>();
|
||||
var recordMode = scope.ServiceProvider.GetRequiredService<IRecordModeService>();
|
||||
var timeProvider = scope.ServiceProvider.GetRequiredService<TimeProvider>();
|
||||
|
||||
var manifest = new ReplayManifest
|
||||
{
|
||||
Scan = new ReplayScanMetadata
|
||||
{
|
||||
Id = scanId,
|
||||
Time = timeProvider.GetUtcNow()
|
||||
}
|
||||
};
|
||||
|
||||
var replay = await recordMode.AttachAsync(
|
||||
new ScanId(scanId),
|
||||
manifest,
|
||||
new ReplayBundleWriteResult("tar1", "z1", 128, 64, "cas://replay/z1.tar.zst"),
|
||||
new ReplayBundleWriteResult("tar2", "z2", 256, 96, "cas://replay/z2.tar.zst"),
|
||||
sbomDigest: "sha256:sbom",
|
||||
findingsDigest: "findings-digest",
|
||||
coordinator: coordinator,
|
||||
additionalBundles: new[]
|
||||
{
|
||||
(new ReplayBundleWriteResult("tar3", "z3", 1, 2, "cas://replay/z3.tar.zst"), "reachability")
|
||||
});
|
||||
|
||||
Assert.NotNull(replay);
|
||||
|
||||
var status = await client.GetFromJsonAsync<ScanStatusResponse>($"/api/v1/scans/{scanId}");
|
||||
Assert.NotNull(status);
|
||||
Assert.NotNull(status!.Replay);
|
||||
Assert.Equal(replay!.ManifestHash, status.Replay!.ManifestHash);
|
||||
Assert.Equal(3, status.Replay!.Bundles.Count);
|
||||
Assert.Contains(status.Replay!.Bundles, b => b.Type == "reachability");
|
||||
Assert.All(status.Replay!.Bundles, b => Assert.StartsWith("sha256:", b.Digest, StringComparison.Ordinal));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,67 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Entropy;
|
||||
using StellaOps.Scanner.Worker.Processing;
|
||||
using StellaOps.Scanner.Worker.Processing.Entropy;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests;
|
||||
|
||||
public class EntropyStageExecutorTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task ExecuteAsync_WritesEntropyReportAndSummary()
|
||||
{
|
||||
// Arrange: create a temp file with random bytes to yield high entropy.
|
||||
var tmp = Path.GetTempFileName();
|
||||
var rng = new Random(1234);
|
||||
var bytes = new byte[64 * 1024];
|
||||
rng.NextBytes(bytes);
|
||||
File.WriteAllBytes(tmp, bytes);
|
||||
|
||||
var fileEntries = new List<ScanFileEntry>
|
||||
{
|
||||
new ScanFileEntry(tmp, sizeBytes: bytes.LongLength, kind: "blob", metadata: new Dictionary<string, string>())
|
||||
};
|
||||
|
||||
var lease = new StubLease("job-1", "scan-1", imageDigest: "sha256:test", layerDigest: "sha256:layer");
|
||||
var context = new ScanJobContext(lease, TimeProvider.System, DateTimeOffset.UtcNow, CancellationToken.None);
|
||||
context.Analysis.Set(ScanAnalysisKeys.FileEntries, (IReadOnlyList<ScanFileEntry>)fileEntries);
|
||||
|
||||
var executor = new EntropyStageExecutor(NullLogger<EntropyStageExecutor>.Instance);
|
||||
|
||||
// Act
|
||||
await executor.ExecuteAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
Assert.True(context.Analysis.TryGet<EntropyReport>(ScanAnalysisKeys.EntropyReport, out var report));
|
||||
Assert.NotNull(report);
|
||||
Assert.Equal("sha256:layer", report!.LayerDigest);
|
||||
Assert.NotEmpty(report.Files);
|
||||
|
||||
Assert.True(context.Analysis.TryGet<EntropyLayerSummary>(ScanAnalysisKeys.EntropyLayerSummary, out var summary));
|
||||
Assert.NotNull(summary);
|
||||
Assert.Equal("sha256:layer", summary!.LayerDigest);
|
||||
}
|
||||
|
||||
private sealed class StubLease : IScanJobLease
|
||||
{
|
||||
public StubLease(string jobId, string scanId, string imageDigest, string layerDigest)
|
||||
{
|
||||
JobId = jobId;
|
||||
ScanId = scanId;
|
||||
ImageDigest = imageDigest;
|
||||
LayerDigest = layerDigest;
|
||||
}
|
||||
|
||||
public string JobId { get; }
|
||||
public string ScanId { get; }
|
||||
public string? ImageDigest { get; }
|
||||
public string? LayerDigest { get; }
|
||||
}
|
||||
}
|
||||
@@ -31,6 +31,15 @@ public sealed class ReachabilityFactDocument
|
||||
[BsonIgnoreIfNull]
|
||||
public Dictionary<string, string?>? Metadata { get; set; }
|
||||
|
||||
[BsonElement("score")]
|
||||
public double Score { get; set; }
|
||||
|
||||
[BsonElement("unknownsCount")]
|
||||
public int UnknownsCount { get; set; }
|
||||
|
||||
[BsonElement("unknownsPressure")]
|
||||
public double UnknownsPressure { get; set; }
|
||||
|
||||
[BsonElement("computedAt")]
|
||||
public DateTimeOffset ComputedAt { get; set; }
|
||||
|
||||
@@ -50,6 +59,15 @@ public sealed class ReachabilityStateDocument
|
||||
[BsonElement("confidence")]
|
||||
public double Confidence { get; set; }
|
||||
|
||||
[BsonElement("bucket")]
|
||||
public string Bucket { get; set; } = "unknown";
|
||||
|
||||
[BsonElement("weight")]
|
||||
public double Weight { get; set; }
|
||||
|
||||
[BsonElement("score")]
|
||||
public double Score { get; set; }
|
||||
|
||||
[BsonElement("path")]
|
||||
public List<string> Path { get; set; } = new();
|
||||
|
||||
|
||||
@@ -10,4 +10,12 @@ public sealed record ReachabilityFactUpdatedEvent(
|
||||
int ReachableCount,
|
||||
int UnreachableCount,
|
||||
int RuntimeFactsCount,
|
||||
DateTimeOffset ComputedAtUtc);
|
||||
string Bucket,
|
||||
double Weight,
|
||||
int StateCount,
|
||||
double FactScore,
|
||||
int UnknownsCount,
|
||||
double UnknownsPressure,
|
||||
double AverageConfidence,
|
||||
DateTimeOffset ComputedAtUtc,
|
||||
string[] Targets);
|
||||
|
||||
@@ -0,0 +1,47 @@
|
||||
using System;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization.Attributes;
|
||||
|
||||
namespace StellaOps.Signals.Models;
|
||||
|
||||
public sealed class UnknownSymbolDocument
|
||||
{
|
||||
[BsonId]
|
||||
[BsonRepresentation(BsonType.ObjectId)]
|
||||
public string Id { get; set; } = ObjectId.GenerateNewId().ToString();
|
||||
|
||||
[BsonElement("subjectKey")]
|
||||
[BsonRequired]
|
||||
public string SubjectKey { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("callgraphId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CallgraphId { get; set; }
|
||||
|
||||
[BsonElement("symbolId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? SymbolId { get; set; }
|
||||
|
||||
[BsonElement("codeId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CodeId { get; set; }
|
||||
|
||||
[BsonElement("purl")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Purl { get; set; }
|
||||
|
||||
[BsonElement("edgeFrom")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? EdgeFrom { get; set; }
|
||||
|
||||
[BsonElement("edgeTo")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? EdgeTo { get; set; }
|
||||
|
||||
[BsonElement("reason")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Reason { get; set; }
|
||||
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
using System.Collections.Generic;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
|
||||
namespace StellaOps.Signals.Models;
|
||||
|
||||
public sealed class UnknownsIngestRequest
|
||||
{
|
||||
[Required]
|
||||
public ReachabilitySubject? Subject { get; set; }
|
||||
|
||||
[Required]
|
||||
public string CallgraphId { get; set; } = string.Empty;
|
||||
|
||||
[Required]
|
||||
public List<UnknownSymbolEntry> Unknowns { get; set; } = new();
|
||||
}
|
||||
|
||||
public sealed class UnknownSymbolEntry
|
||||
{
|
||||
public string? SymbolId { get; set; }
|
||||
public string? CodeId { get; set; }
|
||||
public string? Purl { get; set; }
|
||||
public string? EdgeFrom { get; set; }
|
||||
public string? EdgeTo { get; set; }
|
||||
public string? Reason { get; set; }
|
||||
}
|
||||
|
||||
public sealed class UnknownsIngestResponse
|
||||
{
|
||||
public string SubjectKey { get; init; } = string.Empty;
|
||||
public int UnknownsCount { get; init; }
|
||||
}
|
||||
@@ -26,6 +26,11 @@ public sealed class SignalsMongoOptions
|
||||
/// Collection name storing reachability facts.
|
||||
/// </summary>
|
||||
public string ReachabilityFactsCollection { get; set; } = "reachability_facts";
|
||||
|
||||
/// <summary>
|
||||
/// Collection name storing unresolved symbols/edges (Unknowns Registry).
|
||||
/// </summary>
|
||||
public string UnknownsCollection { get; set; } = "unknowns";
|
||||
|
||||
/// <summary>
|
||||
/// Validates the configured values.
|
||||
@@ -51,5 +56,10 @@ public sealed class SignalsMongoOptions
|
||||
{
|
||||
throw new InvalidOperationException("Signals reachability fact collection name must be configured.");
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(UnknownsCollection))
|
||||
{
|
||||
throw new InvalidOperationException("Signals unknowns collection name must be configured.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +32,24 @@ public sealed class SignalsScoringOptions
|
||||
/// </summary>
|
||||
public double MinConfidence { get; set; } = 0.05;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum fraction to subtract from overall fact score when unknowns are present.
|
||||
/// </summary>
|
||||
public double UnknownsPenaltyCeiling { get; set; } = 0.35;
|
||||
|
||||
/// <summary>
|
||||
/// Multipliers applied per reachability bucket. Keys are case-insensitive.
|
||||
/// Defaults mirror policy scoring config guidance in docs/11_DATA_SCHEMAS.md.
|
||||
/// </summary>
|
||||
public Dictionary<string, double> ReachabilityBuckets { get; } = new(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
{ "entrypoint", 1.0 },
|
||||
{ "direct", 0.85 },
|
||||
{ "runtime", 0.45 },
|
||||
{ "unknown", 0.5 },
|
||||
{ "unreachable", 0.0 }
|
||||
};
|
||||
|
||||
public void Validate()
|
||||
{
|
||||
EnsurePercent(nameof(ReachableConfidence), ReachableConfidence);
|
||||
@@ -39,6 +57,11 @@ public sealed class SignalsScoringOptions
|
||||
EnsurePercent(nameof(RuntimeBonus), RuntimeBonus);
|
||||
EnsurePercent(nameof(MaxConfidence), MaxConfidence);
|
||||
EnsurePercent(nameof(MinConfidence), MinConfidence);
|
||||
EnsurePercent(nameof(UnknownsPenaltyCeiling), UnknownsPenaltyCeiling);
|
||||
foreach (var (key, value) in ReachabilityBuckets)
|
||||
{
|
||||
EnsurePercent($"ReachabilityBuckets[{key}]", value);
|
||||
}
|
||||
|
||||
if (MinConfidence > UnreachableConfidence)
|
||||
{
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user