Add PHP Analyzer Plugin and Composer Lock Data Handling
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Implemented the PhpAnalyzerPlugin to analyze PHP projects.
- Created ComposerLockData class to represent data from composer.lock files.
- Developed ComposerLockReader to load and parse composer.lock files asynchronously.
- Introduced ComposerPackage class to encapsulate package details.
- Added PhpPackage class to represent PHP packages with metadata and evidence.
- Implemented PhpPackageCollector to gather packages from ComposerLockData.
- Created PhpLanguageAnalyzer to perform analysis and emit results.
- Added capability signals for known PHP frameworks and CMS.
- Developed unit tests for the PHP language analyzer and its components.
- Included sample composer.lock and expected output for testing.
- Updated project files for the new PHP analyzer library and tests.
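
The PHP analyzer sources themselves are not part of the diff shown on this page. As a rough illustration of the composer.lock handling the message describes, a minimal sketch could look like the following; all type shapes and member names here are assumptions, not taken from the actual StellaOps implementation:

```csharp
// Minimal sketch of asynchronous composer.lock parsing, in the spirit of the
// ComposerLockReader described above. Type shapes are assumed for illustration.
using System.Text.Json;

public sealed record ComposerPackage(string Name, string Version);

public sealed record ComposerLockData(IReadOnlyList<ComposerPackage> Packages);

public static class ComposerLockReader
{
    public static async Task<ComposerLockData> ReadAsync(string path, CancellationToken cancellationToken)
    {
        await using var stream = File.OpenRead(path);
        using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false);

        var packages = new List<ComposerPackage>();
        // composer.lock stores runtime packages under "packages" and dev-only
        // packages under "packages-dev"; both carry "name" and "version".
        foreach (var section in new[] { "packages", "packages-dev" })
        {
            if (!document.RootElement.TryGetProperty(section, out var array))
            {
                continue;
            }

            foreach (var entry in array.EnumerateArray())
            {
                var name = entry.GetProperty("name").GetString();
                var version = entry.GetProperty("version").GetString();
                if (!string.IsNullOrWhiteSpace(name) && !string.IsNullOrWhiteSpace(version))
                {
                    packages.Add(new ComposerPackage(name!, version!));
                }
            }
        }

        return new ComposerLockData(packages);
    }
}
```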
@@ -1155,72 +1155,29 @@ internal static class CommandFactory
var advise = new Command("advise", "Interact with Advisory AI pipelines.");
_ = options;

var run = new Command("run", "Generate Advisory AI output for the specified task.");
var taskArgument = new Argument<string>("task")
var runOptions = CreateAdvisoryOptions();
var runTaskArgument = new Argument<string>("task")
{
    Description = "Task to run (summary, conflict, remediation)."
};
run.Add(taskArgument);

var advisoryKeyOption = new Option<string>("--advisory-key")
{
    Description = "Advisory identifier to summarise (required).",
    Required = true
};
var artifactIdOption = new Option<string?>("--artifact-id")
{
    Description = "Optional artifact identifier to scope SBOM context."
};
var artifactPurlOption = new Option<string?>("--artifact-purl")
{
    Description = "Optional package URL to scope dependency context."
};
var policyVersionOption = new Option<string?>("--policy-version")
{
    Description = "Policy revision to evaluate (defaults to current)."
};
var profileOption = new Option<string?>("--profile")
{
    Description = "Advisory AI execution profile (default, fips-local, etc.)."
};
var sectionOption = new Option<string[]>("--section")
{
    Description = "Preferred context sections to emphasise (repeatable).",
    Arity = ArgumentArity.ZeroOrMore
};
sectionOption.AllowMultipleArgumentsPerToken = true;

var forceRefreshOption = new Option<bool>("--force-refresh")
{
    Description = "Bypass cached plan/output and recompute."
};

var timeoutOption = new Option<int?>("--timeout")
{
    Description = "Seconds to wait for generated output before timing out (0 = single attempt)."
};
timeoutOption.Arity = ArgumentArity.ZeroOrOne;

run.Add(advisoryKeyOption);
run.Add(artifactIdOption);
run.Add(artifactPurlOption);
run.Add(policyVersionOption);
run.Add(profileOption);
run.Add(sectionOption);
run.Add(forceRefreshOption);
run.Add(timeoutOption);
var run = new Command("run", "Generate Advisory AI output for the specified task.");
run.Add(runTaskArgument);
AddAdvisoryOptions(run, runOptions);

run.SetAction((parseResult, _) =>
{
    var taskValue = parseResult.GetValue(taskArgument);
    var advisoryKey = parseResult.GetValue(advisoryKeyOption) ?? string.Empty;
    var artifactId = parseResult.GetValue(artifactIdOption);
    var artifactPurl = parseResult.GetValue(artifactPurlOption);
    var policyVersion = parseResult.GetValue(policyVersionOption);
    var profile = parseResult.GetValue(profileOption) ?? "default";
    var sections = parseResult.GetValue(sectionOption) ?? Array.Empty<string>();
    var forceRefresh = parseResult.GetValue(forceRefreshOption);
    var timeoutSeconds = parseResult.GetValue(timeoutOption) ?? 120;
    var taskValue = parseResult.GetValue(runTaskArgument);
    var advisoryKey = parseResult.GetValue(runOptions.AdvisoryKey) ?? string.Empty;
    var artifactId = parseResult.GetValue(runOptions.ArtifactId);
    var artifactPurl = parseResult.GetValue(runOptions.ArtifactPurl);
    var policyVersion = parseResult.GetValue(runOptions.PolicyVersion);
    var profile = parseResult.GetValue(runOptions.Profile) ?? "default";
    var sections = parseResult.GetValue(runOptions.Sections) ?? Array.Empty<string>();
    var forceRefresh = parseResult.GetValue(runOptions.ForceRefresh);
    var timeoutSeconds = parseResult.GetValue(runOptions.TimeoutSeconds) ?? 120;
    var outputFormat = ParseAdvisoryOutputFormat(parseResult.GetValue(runOptions.Format));
    var outputPath = parseResult.GetValue(runOptions.Output);
    var verbose = parseResult.GetValue(verboseOption);

    if (!Enum.TryParse<AdvisoryAiTaskType>(taskValue, ignoreCase: true, out var taskType))
@@ -1239,17 +1196,164 @@ internal static class CommandFactory
        sections,
        forceRefresh,
        timeoutSeconds,
        outputFormat,
        outputPath,
        verbose,
        cancellationToken);
});

var summarizeOptions = CreateAdvisoryOptions();
var summarize = new Command("summarize", "Summarize an advisory with JSON/Markdown outputs and citations.");
AddAdvisoryOptions(summarize, summarizeOptions);
summarize.SetAction((parseResult, _) =>
{
    var advisoryKey = parseResult.GetValue(summarizeOptions.AdvisoryKey) ?? string.Empty;
    var artifactId = parseResult.GetValue(summarizeOptions.ArtifactId);
    var artifactPurl = parseResult.GetValue(summarizeOptions.ArtifactPurl);
    var policyVersion = parseResult.GetValue(summarizeOptions.PolicyVersion);
    var profile = parseResult.GetValue(summarizeOptions.Profile) ?? "default";
    var sections = parseResult.GetValue(summarizeOptions.Sections) ?? Array.Empty<string>();
    var forceRefresh = parseResult.GetValue(summarizeOptions.ForceRefresh);
    var timeoutSeconds = parseResult.GetValue(summarizeOptions.TimeoutSeconds) ?? 120;
    var outputFormat = ParseAdvisoryOutputFormat(parseResult.GetValue(summarizeOptions.Format));
    var outputPath = parseResult.GetValue(summarizeOptions.Output);
    var verbose = parseResult.GetValue(verboseOption);

    return CommandHandlers.HandleAdviseRunAsync(
        services,
        AdvisoryAiTaskType.Summary,
        advisoryKey,
        artifactId,
        artifactPurl,
        policyVersion,
        profile,
        sections,
        forceRefresh,
        timeoutSeconds,
        outputFormat,
        outputPath,
        verbose,
        cancellationToken);
});

advise.Add(run);
advise.Add(summarize);
return advise;
}

private static AdvisoryCommandOptions CreateAdvisoryOptions()
{
    var advisoryKey = new Option<string>("--advisory-key")
    {
        Description = "Advisory identifier to summarise (required).",
        Required = true
    };

    var artifactId = new Option<string?>("--artifact-id")
    {
        Description = "Optional artifact identifier to scope SBOM context."
    };

    var artifactPurl = new Option<string?>("--artifact-purl")
    {
        Description = "Optional package URL to scope dependency context."
    };

    var policyVersion = new Option<string?>("--policy-version")
    {
        Description = "Policy revision to evaluate (defaults to current)."
    };

    var profile = new Option<string?>("--profile")
    {
        Description = "Advisory AI execution profile (default, fips-local, etc.)."
    };

    var sections = new Option<string[]>("--section")
    {
        Description = "Preferred context sections to emphasise (repeatable).",
        Arity = ArgumentArity.ZeroOrMore
    };
    sections.AllowMultipleArgumentsPerToken = true;

    var forceRefresh = new Option<bool>("--force-refresh")
    {
        Description = "Bypass cached plan/output and recompute."
    };

    var timeoutSeconds = new Option<int?>("--timeout")
    {
        Description = "Seconds to wait for generated output before timing out (0 = single attempt)."
    };
    timeoutSeconds.Arity = ArgumentArity.ZeroOrOne;

    var format = new Option<string?>("--format")
    {
        Description = "Output format: table (default), json, or markdown."
    };

    var output = new Option<string?>("--output")
    {
        Description = "File path to write advisory output when using json/markdown formats."
    };

    return new AdvisoryCommandOptions(
        advisoryKey,
        artifactId,
        artifactPurl,
        policyVersion,
        profile,
        sections,
        forceRefresh,
        timeoutSeconds,
        format,
        output);
}

private static void AddAdvisoryOptions(Command command, AdvisoryCommandOptions options)
{
    command.Add(options.AdvisoryKey);
    command.Add(options.ArtifactId);
    command.Add(options.ArtifactPurl);
    command.Add(options.PolicyVersion);
    command.Add(options.Profile);
    command.Add(options.Sections);
    command.Add(options.ForceRefresh);
    command.Add(options.TimeoutSeconds);
    command.Add(options.Format);
    command.Add(options.Output);
}

private static AdvisoryOutputFormat ParseAdvisoryOutputFormat(string? formatValue)
{
    var normalized = string.IsNullOrWhiteSpace(formatValue)
        ? "table"
        : formatValue!.Trim().ToLowerInvariant();

    return normalized switch
    {
        "json" => AdvisoryOutputFormat.Json,
        "markdown" => AdvisoryOutputFormat.Markdown,
        "md" => AdvisoryOutputFormat.Markdown,
        _ => AdvisoryOutputFormat.Table
    };
}

private sealed record AdvisoryCommandOptions(
    Option<string> AdvisoryKey,
    Option<string?> ArtifactId,
    Option<string?> ArtifactPurl,
    Option<string?> PolicyVersion,
    Option<string?> Profile,
    Option<string[]> Sections,
    Option<bool> ForceRefresh,
    Option<int?> TimeoutSeconds,
    Option<string?> Format,
    Option<string?> Output);

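An illustrative check of the parser's behaviour (not part of the diff; shown as xunit-style assertions assuming the method were accessible from a test):

```csharp
// ParseAdvisoryOutputFormat is case-insensitive, accepts the "md" shorthand,
// and falls back to the table renderer for null/blank/unknown values.
Assert.Equal(AdvisoryOutputFormat.Json, ParseAdvisoryOutputFormat("JSON"));
Assert.Equal(AdvisoryOutputFormat.Markdown, ParseAdvisoryOutputFormat("md"));
Assert.Equal(AdvisoryOutputFormat.Markdown, ParseAdvisoryOutputFormat("  Markdown "));
Assert.Equal(AdvisoryOutputFormat.Table, ParseAdvisoryOutputFormat(null));
Assert.Equal(AdvisoryOutputFormat.Table, ParseAdvisoryOutputFormat("yaml"));
```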
private static Command BuildVulnCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
{
    var vuln = new Command("vuln", "Explore vulnerability observations and overlays.");

    var observations = new Command("observations", "List raw advisory observations for overlay consumers.");

@@ -448,6 +448,8 @@ internal static class CommandHandlers
    IReadOnlyList<string> preferredSections,
    bool forceRefresh,
    int timeoutSeconds,
    AdvisoryOutputFormat outputFormat,
    string? outputPath,
    bool verbose,
    CancellationToken cancellationToken)
{
@@ -542,7 +544,14 @@ internal static class CommandHandlers
    activity?.SetTag("stellaops.cli.advisory.cache_hit", output.PlanFromCache);
    logger.LogInformation("Advisory output ready (cache key {CacheKey}).", output.CacheKey);

    RenderAdvisoryOutput(output);
    var rendered = RenderAdvisoryOutput(output, outputFormat);

    if (!string.IsNullOrWhiteSpace(outputPath) && rendered is not null)
    {
        var fullPath = Path.GetFullPath(outputPath!);
        await File.WriteAllTextAsync(fullPath, rendered, cancellationToken).ConfigureAwait(false);
        logger.LogInformation("Advisory output written to {Path}.", fullPath);
    }

    if (output.Guardrail.Blocked)
    {
@@ -6326,7 +6335,113 @@ internal static class CommandHandlers
    }
}

private static void RenderAdvisoryOutput(AdvisoryPipelineOutputModel output)
private static string? RenderAdvisoryOutput(AdvisoryPipelineOutputModel output, AdvisoryOutputFormat format)
{
    return format switch
    {
        AdvisoryOutputFormat.Json => RenderAdvisoryOutputJson(output),
        AdvisoryOutputFormat.Markdown => RenderAdvisoryOutputMarkdown(output),
        _ => RenderAdvisoryOutputTable(output)
    };
}

private static string RenderAdvisoryOutputJson(AdvisoryPipelineOutputModel output)
{
    return JsonSerializer.Serialize(output, new JsonSerializerOptions(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    });
}

private static string RenderAdvisoryOutputMarkdown(AdvisoryPipelineOutputModel output)
{
    var builder = new StringBuilder();
    builder.AppendLine($"# Advisory {output.TaskType} ({output.Profile})");
    builder.AppendLine();
    builder.AppendLine($"- Cache Key: `{output.CacheKey}`");
    builder.AppendLine($"- Generated: {output.GeneratedAtUtc.ToString("O", CultureInfo.InvariantCulture)}");
    builder.AppendLine($"- Plan From Cache: {(output.PlanFromCache ? "yes" : "no")}");
    builder.AppendLine($"- Guardrail Blocked: {(output.Guardrail.Blocked ? "yes" : "no")}");
    builder.AppendLine();

    if (!string.IsNullOrWhiteSpace(output.Response))
    {
        builder.AppendLine("## Response");
        builder.AppendLine(output.Response.Trim());
        builder.AppendLine();
    }

    if (!string.IsNullOrWhiteSpace(output.Prompt))
    {
        builder.AppendLine("## Prompt (sanitized)");
        builder.AppendLine(output.Prompt.Trim());
        builder.AppendLine();
    }

    if (output.Citations.Count > 0)
    {
        builder.AppendLine("## Citations");
        foreach (var citation in output.Citations.OrderBy(c => c.Index))
        {
            builder.AppendLine($"- [{citation.Index}] {citation.DocumentId} :: {citation.ChunkId}");
        }

        builder.AppendLine();
    }

    if (output.Metadata.Count > 0)
    {
        builder.AppendLine("## Output Metadata");
        foreach (var entry in output.Metadata.OrderBy(kvp => kvp.Key, StringComparer.OrdinalIgnoreCase))
        {
            builder.AppendLine($"- **{entry.Key}**: {entry.Value}");
        }

        builder.AppendLine();
    }

    if (output.Guardrail.Metadata.Count > 0)
    {
        builder.AppendLine("## Guardrail Metadata");
        foreach (var entry in output.Guardrail.Metadata.OrderBy(kvp => kvp.Key, StringComparer.OrdinalIgnoreCase))
        {
            builder.AppendLine($"- **{entry.Key}**: {entry.Value}");
        }

        builder.AppendLine();
    }

    if (output.Guardrail.Violations.Count > 0)
    {
        builder.AppendLine("## Guardrail Violations");
        foreach (var violation in output.Guardrail.Violations)
        {
            builder.AppendLine($"- `{violation.Code}`: {violation.Message}");
        }

        builder.AppendLine();
    }

    builder.AppendLine("## Provenance");
    builder.AppendLine($"- Input Digest: `{output.Provenance.InputDigest}`");
    builder.AppendLine($"- Output Hash: `{output.Provenance.OutputHash}`");

    if (output.Provenance.Signatures.Count > 0)
    {
        foreach (var signature in output.Provenance.Signatures)
        {
            builder.AppendLine($"- Signature: `{signature}`");
        }
    }
    else
    {
        builder.AppendLine("- Signature: none");
    }

    return builder.ToString();
}

private static string? RenderAdvisoryOutputTable(AdvisoryPipelineOutputModel output)
{
    var console = AnsiConsole.Console;

@@ -6428,6 +6543,8 @@ internal static class CommandHandlers
    provenance.AddRow("Signatures", signatures);

    console.Write(provenance);

    return null;
}

private static Table CreateKeyValueTable(string title, IReadOnlyDictionary<string, string> entries)

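For the fixture introduced in the tests later in this commit (cache key `cache-markdown`, citation `doc-9 :: chunk-9`, no metadata, no signatures), RenderAdvisoryOutputMarkdown would emit roughly the following document; this sample is derived from the builder code above, not captured from a real run:

```markdown
# Advisory Summary (default)

- Cache Key: `cache-markdown`
- Generated: 2025-11-06T12:00:00.0000000+00:00
- Plan From Cache: no
- Guardrail Blocked: no

## Response
Rendered summary body.

## Prompt (sanitized)
Sanitized prompt

## Citations
- [1] doc-9 :: chunk-9

## Provenance
- Input Digest: `sha256:markdown-in`
- Output Hash: `sha256:markdown-out`
- Signature: none
```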
@@ -11,6 +11,13 @@ internal enum AdvisoryAiTaskType
    Remediation
}

internal enum AdvisoryOutputFormat
{
    Table,
    Json,
    Markdown
}

internal sealed class AdvisoryPipelinePlanRequestModel
{
    public AdvisoryAiTaskType TaskType { get; init; }

@@ -3,3 +3,4 @@
| Task ID | State | Notes |
| --- | --- | --- |
| `SCANNER-CLI-0001` | DONE (2025-11-12) | Ruby verbs now consume the persisted `RubyPackageInventory`, warn when inventories are missing, and docs/tests were refreshed per Sprint 138. |
| `CLI-AIAI-31-001` | DOING (2025-11-22) | Building `stella advise summarize` with JSON/Markdown outputs and citation rendering (Sprint 0201 CLI I). |

@@ -749,6 +749,8 @@ public sealed class CommandHandlersTests
    new[] { "impact", "impact " },
    forceRefresh: false,
    timeoutSeconds: 0,
    outputFormat: AdvisoryOutputFormat.Table,
    outputPath: null,
    verbose: false,
    cancellationToken: CancellationToken.None);

@@ -777,6 +779,104 @@ public sealed class CommandHandlersTests
    }
}

[Fact]
public async Task HandleAdviseRunAsync_WritesMarkdownWithCitations()
{
    var originalExit = Environment.ExitCode;
    var originalConsole = AnsiConsole.Console;
    using var tempDir = new TempDirectory();
    var outputPath = Path.Combine(tempDir.Path, "advisory.md");
    var testConsole = new TestConsole();

    try
    {
        Environment.ExitCode = 0;
        AnsiConsole.Console = testConsole;

        var planResponse = new AdvisoryPipelinePlanResponseModel
        {
            TaskType = AdvisoryAiTaskType.Summary.ToString(),
            CacheKey = "cache-markdown",
            PromptTemplate = "prompts/advisory/summary.liquid",
            Budget = new AdvisoryTaskBudgetModel
            {
                PromptTokens = 256,
                CompletionTokens = 64
            },
            Chunks = Array.Empty<PipelineChunkSummaryModel>(),
            Vectors = Array.Empty<PipelineVectorSummaryModel>(),
            Metadata = new Dictionary<string, string>()
        };

        var outputResponse = new AdvisoryPipelineOutputModel
        {
            CacheKey = planResponse.CacheKey,
            TaskType = planResponse.TaskType,
            Profile = "default",
            Prompt = "Sanitized prompt",
            Response = "Rendered summary body.",
            Citations = new[]
            {
                new AdvisoryOutputCitationModel { Index = 1, DocumentId = "doc-9", ChunkId = "chunk-9" }
            },
            Metadata = new Dictionary<string, string>(),
            Guardrail = new AdvisoryOutputGuardrailModel
            {
                Blocked = false,
                SanitizedPrompt = "Sanitized prompt",
                Violations = Array.Empty<AdvisoryOutputGuardrailViolationModel>(),
                Metadata = new Dictionary<string, string>()
            },
            Provenance = new AdvisoryOutputProvenanceModel
            {
                InputDigest = "sha256:markdown-in",
                OutputHash = "sha256:markdown-out",
                Signatures = Array.Empty<string>()
            },
            GeneratedAtUtc = DateTimeOffset.Parse("2025-11-06T12:00:00Z", CultureInfo.InvariantCulture),
            PlanFromCache = false
        };

        var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
        {
            AdvisoryPlanResponse = planResponse,
            AdvisoryOutputResponse = outputResponse
        };

        var provider = BuildServiceProvider(backend);

        await CommandHandlers.HandleAdviseRunAsync(
            provider,
            AdvisoryAiTaskType.Summary,
            "ADV-4",
            null,
            null,
            null,
            "default",
            Array.Empty<string>(),
            forceRefresh: false,
            timeoutSeconds: 0,
            outputFormat: AdvisoryOutputFormat.Markdown,
            outputPath: outputPath,
            verbose: false,
            cancellationToken: CancellationToken.None);

        var markdown = await File.ReadAllTextAsync(outputPath);
        Assert.Contains("Citations", markdown, StringComparison.OrdinalIgnoreCase);
        Assert.Contains("doc-9", markdown, StringComparison.OrdinalIgnoreCase);
        Assert.Contains("chunk-9", markdown, StringComparison.OrdinalIgnoreCase);
        Assert.True(File.Exists(outputPath));
        Assert.Contains("Rendered summary body", markdown, StringComparison.OrdinalIgnoreCase);
        Assert.Equal(0, Environment.ExitCode);
        Assert.Contains("Citations", testConsole.Output, StringComparison.OrdinalIgnoreCase);
    }
    finally
    {
        AnsiConsole.Console = originalConsole;
        Environment.ExitCode = originalExit;
    }
}

[Fact]
public async Task HandleAdviseRunAsync_ReturnsGuardrailExitCodeOnBlock()
{
@@ -855,6 +955,8 @@ public sealed class CommandHandlersTests
    Array.Empty<string>(),
    forceRefresh: true,
    timeoutSeconds: 0,
    outputFormat: AdvisoryOutputFormat.Table,
    outputPath: null,
    verbose: false,
    cancellationToken: CancellationToken.None);

@@ -913,6 +1015,8 @@ public sealed class CommandHandlersTests
    Array.Empty<string>(),
    forceRefresh: false,
    timeoutSeconds: 0,
    outputFormat: AdvisoryOutputFormat.Table,
    outputPath: null,
    verbose: false,
    cancellationToken: CancellationToken.None);

@@ -189,6 +189,7 @@ internal static class AdvisoryLinksetNormalization
    var reason = key switch
    {
        "severity" => "severity-mismatch",
        var k when k.StartsWith("cvss", StringComparison.OrdinalIgnoreCase) => "cvss-mismatch",
        "ranges" => "affected-range-divergence",
        "references" => "reference-clash",
        "aliases" => "alias-inconsistency",

@@ -4,6 +4,8 @@ using System.Collections.Immutable;
using System.Linq;
using StellaOps.Concelier.Models;

#pragma warning disable CS8620 // nullability mismatches guarded by explicit filtering

namespace StellaOps.Concelier.Core.Linksets;

internal static class LinksetCorrelation
@@ -109,19 +111,15 @@ internal static class LinksetCorrelation
    List<HashSet<string>> packageKeysPerInput = inputs
        .Select(i => i.Purls
            .Select(ExtractPackageKey)
            .Where(k => !string.IsNullOrEmpty(k))
            .Where(k => !string.IsNullOrWhiteSpace(k))
            .ToHashSet(StringComparer.Ordinal))
        .ToList();

    var sharedPackages = packageKeysPerInput
        .Skip(1)
        .Aggregate(
            new HashSet<string>(packageKeysPerInput.First()!, StringComparer.Ordinal),
            (acc, next) =>
            {
                acc.IntersectWith(next!);
                return acc;
            });
    var sharedPackages = new HashSet<string>(packageKeysPerInput.FirstOrDefault() ?? new HashSet<string>(), StringComparer.Ordinal);
    foreach (var next in packageKeysPerInput.Skip(1))
    {
        sharedPackages.IntersectWith(next);
    }

    if (sharedPackages.Count > 0)
    {
@@ -140,12 +138,17 @@ internal static class LinksetCorrelation

private static IEnumerable<AdvisoryLinksetConflict> CollectRangeConflicts(
    IReadOnlyCollection<Input> inputs,
    HashSet<string> sharedPackages)
    HashSet<string?> sharedPackages)
{
    var conflicts = new List<AdvisoryLinksetConflict>();

    foreach (var package in sharedPackages)
    {
        if (package is null)
        {
            continue;
        }

        var values = inputs
            .SelectMany(i => i.Purls
                .Where(p => ExtractPackageKey(p) == package)
@@ -169,6 +172,8 @@ internal static class LinksetCorrelation
    return conflicts;
}

#pragma warning restore CS8620

private static bool HasExactPurlOverlap(IReadOnlyCollection<Input> inputs)
{
    var first = inputs.First().Purls.ToHashSet(StringComparer.Ordinal);

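The rewrite in this hunk matters for the empty-input case: the old `Aggregate` seeded from `First()`, which throws when `packageKeysPerInput` is empty, while the new loop seeds from `FirstOrDefault()` and degrades to an empty set. A standalone sketch of the pattern (illustrative only, not code from the diff):

```csharp
// Intersect an arbitrary number of string sets without assuming at least one
// input. Seeding from FirstOrDefault() keeps the fold safe for empty lists,
// whereas Aggregate(First(), ...) would throw InvalidOperationException.
static HashSet<string> IntersectAll(IReadOnlyList<HashSet<string>> sets)
{
    var shared = new HashSet<string>(
        sets.FirstOrDefault() ?? new HashSet<string>(),
        StringComparer.Ordinal);

    foreach (var next in sets.Skip(1))
    {
        shared.IntersectWith(next);
    }

    return shared; // empty when sets is empty or nothing is common to all
}
```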
@@ -28,4 +28,20 @@ public sealed class AdvisoryLinksetNormalizationConfidenceTests
    Assert.Equal("severity-mismatch", conflict.Reason);
    Assert.Contains("severity:mismatch", conflict.Values!);
}

[Fact]
public void FromRawLinksetWithConfidence_EmitsCvssMismatchConflict()
{
    var linkset = new RawLinkset
    {
        PackageUrls = ImmutableArray.Create("pkg:maven/com.acme/foo@2.0.0"),
        Notes = ImmutableDictionary.CreateRange(new[] { new KeyValuePair<string, string>("cvss_v3", "7.5/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H") })
    };

    var (_, _, conflicts) = AdvisoryLinksetNormalization.FromRawLinksetWithConfidence(linkset);

    var conflict = Assert.Single(conflicts);
    Assert.Equal("cvss-mismatch", conflict.Reason);
    Assert.Contains("cvss_v3:7.5/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", conflict.Values!);
}
}

@@ -96,6 +96,7 @@ public sealed class AdvisoryObservationAggregationTests
    Assert.Contains(aggregate.Conflicts, c => c.Reason == "alias-inconsistency");
    Assert.Contains(aggregate.Conflicts, c => c.Reason == "affected-range-divergence");
    Assert.True(aggregate.Confidence is > 0.0 and < 1.0);
    Assert.All(aggregate.Conflicts, c => Assert.NotNull(c.SourceIds));
}

[Fact]

@@ -33,8 +33,8 @@ public class AdvisoryObservationTransportWorkerTests
    "hash-1",
    DateTimeOffset.UtcNow,
    ReplayCursor: "cursor-1",
    supersedesId: null,
    traceId: "trace-1");
    SupersedesId: null,
    TraceId: "trace-1");

var outbox = new FakeOutbox(evt);
var transport = new FakeTransport();

src/DevPortal/StellaOps.DevPortal.Site/.gitignore (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
node_modules
.dist
output
.cache
.DS_Store

src/DevPortal/StellaOps.DevPortal.Site/TASKS.md (new file, 12 lines)
@@ -0,0 +1,12 @@
# DevPortal Tasks · Sprint 0206.0001.0001

Keep this file in sync with `docs/implplan/SPRINT_0206_0001_0001_devportal.md`.

| Task ID | Status | Notes | Last Updated (UTC) |
| --- | --- | --- | --- |
| DEVPORT-62-001 | DOING | Select SSG, wire aggregate spec, nav/search scaffold. | 2025-11-22 |
| DEVPORT-62-002 | TODO | Schema viewer, examples, copy-curl, version selector. | 2025-11-22 |
| DEVPORT-63-001 | TODO | Try-It console against sandbox; token onboarding UX. | 2025-11-22 |
| DEVPORT-63-002 | TODO | Embed SDK snippets/quick starts from tested examples. | 2025-11-22 |
| DEVPORT-64-001 | TODO | Offline bundle target with specs + SDK archives; zero external assets. | 2025-11-22 |
| DEVPORT-64-002 | TODO | Accessibility tests, link checker, performance budgets. | 2025-11-22 |

src/DevPortal/StellaOps.DevPortal.Site/astro.config.mjs (new file, 69 lines)
@@ -0,0 +1,69 @@
import { defineConfig } from 'astro/config';
import mdx from '@astrojs/mdx';
import starlight from '@astrojs/starlight';

export default defineConfig({
  site: 'https://devportal.stellaops.local',
  srcDir: 'src',
  outDir: 'dist',
  trailingSlash: 'never',
  integrations: [
    mdx(),
    starlight({
      title: 'StellaOps DevPortal',
      description: 'Deterministic, offline-first developer portal for the StellaOps platform.',
      favicon: {
        src: '/logo.svg',
        sizes: 'any',
        type: 'image/svg+xml',
      },
      logo: {
        src: '/logo.svg',
        alt: 'StellaOps DevPortal',
      },
      customCss: ['./src/styles/custom.css'],
      social: {
        github: 'https://git.stella-ops.org',
      },
      search: {
        provider: 'local',
        algolia: undefined,
      },
      sidebar: [
        {
          label: 'Overview',
          items: [
            { slug: 'index' },
            { slug: 'guides/getting-started' },
            { slug: 'guides/navigation-search' },
          ],
        },
        {
          label: 'API',
          items: [{ slug: 'api-reference' }],
        },
        {
          label: 'Roadmap',
          items: [{ slug: 'release-notes' }],
        },
      ],
      tableOfContents: {
        minHeadingLevel: 2,
        maxHeadingLevel: 4,
      },
      pagination: true,
      editLink: {
        baseUrl: 'https://git.stella-ops.org/devportal',
      },
      head: [
        {
          tag: 'meta',
          attrs: {
            name: 'theme-color',
            content: '#0f172a',
          },
        },
      ],
    }),
  ],
});

src/DevPortal/StellaOps.DevPortal.Site/package-lock.json (generated, new file, 8298 lines; diff suppressed because it is too large)

src/DevPortal/StellaOps.DevPortal.Site/package.json (new file, 29 lines)
@@ -0,0 +1,29 @@
{
  "name": "@stellaops/devportal-site",
  "version": "0.1.0",
  "private": true,
  "type": "module",
  "license": "AGPL-3.0-or-later",
  "engines": {
    "node": ">=18.18.0"
  },
  "scripts": {
    "dev": "astro dev",
    "start": "astro dev --host",
    "build": "astro build",
    "preview": "astro preview",
    "check": "astro check",
    "sync:spec": "node scripts/sync-spec.mjs",
    "prepare:static": "npm run sync:spec && astro check"
  },
  "dependencies": {
    "rapidoc": "9.3.8"
  },
  "devDependencies": {
    "@astrojs/mdx": "4.3.12",
    "@astrojs/starlight": "0.36.2",
    "@types/node": "24.10.1",
    "astro": "5.16.0",
    "typescript": "5.9.3"
  }
}

src/DevPortal/StellaOps.DevPortal.Site/public/api/stella.yaml (new file, 1542 lines; diff suppressed because it is too large)

src/DevPortal/StellaOps.DevPortal.Site/public/logo.svg (new file, 13 lines, 679 B)
@@ -0,0 +1,13 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 200 200" role="img" aria-labelledby="title desc">
  <title id="title">StellaOps DevPortal</title>
  <desc id="desc">Stylised starburst mark for the StellaOps developer portal.</desc>
  <defs>
    <linearGradient id="g" x1="0%" x2="100%" y1="0%" y2="100%">
      <stop offset="0%" stop-color="#0ea5e9" />
      <stop offset="100%" stop-color="#22d3ee" />
    </linearGradient>
  </defs>
  <rect width="200" height="200" rx="28" fill="#0b1220" />
  <path fill="url(#g)" d="M100 22l16 46h48l-39 28 15 46-40-27-40 27 15-46-39-28h48z"/>
  <circle cx="100" cy="100" r="16" fill="#0b1220" stroke="#22d3ee" stroke-width="6" />
</svg>

src/DevPortal/StellaOps.DevPortal.Site/scripts/sync-spec.mjs (new file, 32 lines)
@@ -0,0 +1,32 @@
#!/usr/bin/env node
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';
import { fileURLToPath } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const moduleRoot = path.resolve(__dirname, '..');
const repoRoot = path.resolve(moduleRoot, '..', '..', '..');
const sourceSpec = path.join(repoRoot, 'src/Api/StellaOps.Api.OpenApi/stella.yaml');
const targetDir = path.join(moduleRoot, 'public', 'api');
const targetSpec = path.join(targetDir, 'stella.yaml');

function hashFile(filePath) {
  const hash = crypto.createHash('sha256');
  hash.update(fs.readFileSync(filePath));
  return hash.digest('hex');
}

if (!fs.existsSync(sourceSpec)) {
  console.error(`[devportal:sync-spec] missing source spec at ${sourceSpec}`);
  process.exitCode = 1;
  process.exit();
}

fs.mkdirSync(targetDir, { recursive: true });
fs.copyFileSync(sourceSpec, targetSpec);

const sizeKb = (fs.statSync(targetSpec).size / 1024).toFixed(1);
const digest = hashFile(targetSpec).slice(0, 12);
console.log(`[devportal:sync-spec] copied aggregate spec -> public/api/stella.yaml (${sizeKb} KiB, sha256:${digest}...)`);

src/DevPortal/StellaOps.DevPortal.Site/src/content/config.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { defineCollection, z } from 'astro:content';

const docs = defineCollection({
  type: 'content',
  schema: z.object({
    title: z.string(),
    description: z.string().optional(),
    sidebar: z
      .object({
        label: z.string().optional(),
      })
      .optional(),
    order: z.number().optional(),
  }),
});

export const collections = { docs };

@@ -0,0 +1,37 @@
---
title: API Reference
description: Aggregate OpenAPI surface for StellaOps services with schema-first navigation.
---

import 'rapidoc/dist/rapidoc-min.js';

> The aggregate spec is composed from per-service OpenAPI files and namespaced by service (e.g., `/authority/...`). The bundled copy lives at `/api/stella.yaml` so offline builds stay self-contained.

<rapi-doc
  spec-url="/api/stella.yaml"
  render-style="read"
  theme="dark"
  bg-color="#0b1220"
  text-color="#e5e7eb"
  primary-color="#0ea5e9"
  nav-bg-color="#0f172a"
  nav-text-color="#cbd5e1"
  show-header="false"
  allow-try="false"
  allow-spec-url-load="false"
  allow-spec-file-load="false"
  regular-font="Space Grotesk"
  mono-font="JetBrains Mono"
  schema-style="tree"
  default-schema-tab="schema"
  sort-tags="true"
  sort-endpoints-by="path"
  hide-schema-titles="false"
  layout="row"
  style="height: 80vh; border: 1px solid #1f2937; border-radius: 12px;"
></rapi-doc>

## What to look for
- Per-operation `x-service` and `x-original-path` values expose provenance.
- Shared schemas live under `#/components/schemas` with namespaced keys.
- Servers list includes one entry per service; sandbox URLs will be added alongside prod.

@@ -0,0 +1,38 @@
---
title: Getting Started
description: Build and preview the DevPortal locally with deterministic inputs.
---

## Prerequisites
- Node.js 18.18 or later (offline-friendly install).
- `npm install --package-lock-only` to capture the lockfile; `npm ci --progress=false` when you need a full install.
- Aggregate OpenAPI file at `src/Api/StellaOps.Api.OpenApi/stella.yaml` (generated via `npm run api:compose` from the repo root).

## Build locally
1. Sync the aggregate spec into the portal assets:
   ```bash
   npm run sync:spec
   ```
2. Install dependencies (skips network analytics):
   ```bash
   npm ci --ignore-scripts --progress=false --no-fund --no-audit
   ```
3. Run the site locally:
   ```bash
   npm run dev -- --host
   ```
4. Generate a production bundle (offline-ready):
   ```bash
   npm run build
   ```

## Determinism & offline posture
- The portal never pulls fonts or JS from CDNs; all assets live under `public/`.
- The aggregate spec is stored at `/api/stella.yaml` and is bundled into exports.
- Search uses a local index generated at build time—no third-party calls.

## Where things live
- Content: `src/content/docs/**`
- Styling tokens: `src/styles/custom.css`
- Spec sync helper: `scripts/sync-spec.mjs`
- Build output: `dist/` (ready for static serving or offline export)

@@ -0,0 +1,24 @@
---
title: Navigation & Search
description: How the DevPortal organizes content and builds offline search indices.
---

## Navigation model
- **Overview** for narrative journeys and onboarding.
- **API** for the aggregate OpenAPI viewer and schema-aware tools.
- **Roadmap** for release notes and drop-specific changes.
- Sidebar order is pinned in `astro.config.mjs` to keep builds deterministic.

## Search
- Provider: **local** (FlexSearch) generated at build time.
- Works offline; indexes titles, headings, and descriptions across docs.
- Search box appears in the top nav. Keyboard shortcut: `/` (press in any page).

## Content guidelines
- Every page must declare `title` and `description` frontmatter to land in the index.
- Prefer short headings (≤60 characters) for clean search snippets.
- Keep code examples deterministic: pin versions and avoid network calls.

## Upcoming
- API operation deep-links will join the index once schema viewer (DEVPORT-62-002) lands.
- Try-It console (DEVPORT-63-001) will expose a sandbox surface gated by scopes.

@@ -0,0 +1,30 @@
---
title: Welcome to the StellaOps DevPortal
description: Deterministic, offline-first documentation and API reference for the StellaOps platform.
---

import { Card, CardGrid } from '@astrojs/starlight/components';

The StellaOps DevPortal binds specs, runnable examples, and SDK entrypoints into a single, deterministic build. Everything here is designed to work online or fully air-gapped so auditors and engineers see the same evidence.

<CardGrid>
  <Card title="Aggregate API" icon="tabler:api" href="/docs/api-reference/">
    Browse the composed OpenAPI surface, schema-first paths, and auth expectations.
  </Card>
  <Card title="Get started" icon="tabler:flag" href="/docs/guides/getting-started/">
    Install tooling, sync the aggregate spec, and render the portal locally.
  </Card>
  <Card title="Navigation & search" icon="tabler:search" href="/docs/guides/navigation-search/">
    Learn how content is organized and how offline search works.
  </Card>
</CardGrid>

## Why now
- Offline parity: the same portal ships as static HTML with bundled assets.
- Deterministic rebuilds: aggregate spec and examples are pinned in-source.
- Audit-ready: schema-first views, provenance attached to specs, and upcoming try-it sandbox.

## What lives here
- Aggregate OpenAPI (namespaced by service) with schema explorer.
- Guides for tokens, scopes, SDKs, and export bundles.
- Release notes aligned to platform drops.

@@ -0,0 +1,15 @@
---
title: Release Notes
description: Drop-by-drop updates for the DevPortal surface.
---

## 2025-11 (Sprint 0206.0001.0001)
- ✅ Selected Astro + Starlight as the static site generator for deterministic offline builds.
- ✅ Added navigation scaffolding (Overview, Guides, API, Roadmap) with local search enabled.
- ✅ Embedded aggregate OpenAPI via RapiDoc using bundled `/api/stella.yaml`.
- 🔜 Schema explorer UI and copy-curl snippets (DEVPORT-62-002).
- 🔜 Try-It console against sandbox scopes (DEVPORT-63-001).

## How to contribute release entries
- Add a dated section with bullet points grouped by task ID when features land.
- Keep entries aligned to sprint IDs and include any risks or follow-ups.

src/DevPortal/StellaOps.DevPortal.Site/src/env.d.ts (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
/// <reference path="../.astro/types.d.ts" />
/// <reference types="astro/client" />

src/DevPortal/StellaOps.DevPortal.Site/src/styles/custom.css (new file, 45 lines)
@@ -0,0 +1,45 @@
:root {
  --sl-font-sans: "Space Grotesk", "Segoe UI", "Inter", system-ui, -apple-system, sans-serif;
  --sl-font-mono: "JetBrains Mono", "SFMono-Regular", ui-monospace, Menlo, Consolas, monospace;
  --sl-color-accent: #0ea5e9;
  --sl-color-text: #e5e7eb;
  --sl-color-text-accent: #a5f3fc;
  --sl-color-text-muted: #cbd5e1;
  --sl-color-bg: #0b1220;
  --sl-color-bg-soft: #0f172a;
  --sl-color-hairline: #1f2937;
  --sl-heading-font-weight: 700;
  --sl-body-font-weight: 400;
}

body {
  background: radial-gradient(circle at 20% 20%, rgba(14, 165, 233, 0.12), transparent 25%),
    radial-gradient(circle at 80% 10%, rgba(99, 102, 241, 0.14), transparent 25%),
    linear-gradient(180deg, #0b1220 0%, #0f172a 60%, #0b1220 100%);
  color: var(--sl-color-text);
}

.sl-link-card {
  border: 1px solid var(--sl-color-hairline);
  background: linear-gradient(180deg, rgba(255, 255, 255, 0.03), rgba(255, 255, 255, 0.01));
  box-shadow: 0 12px 40px rgba(0, 0, 0, 0.25);
}

:where(.sl-markdown) h2 {
  letter-spacing: -0.02em;
}

:where(.sl-markdown) code {
  background: rgba(15, 23, 42, 0.7);
  border: 1px solid var(--sl-color-hairline);
}

nav.sl-topnav {
  border-bottom: 1px solid var(--sl-color-hairline);
  backdrop-filter: blur(10px);
}

.sl-search-box input {
  background: rgba(255, 255, 255, 0.08);
  border: 1px solid var(--sl-color-hairline);
}

src/DevPortal/StellaOps.DevPortal.Site/tsconfig.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "extends": "astro/tsconfigs/strict",
  "compilerOptions": {
    "types": ["astro/client"],
    "baseUrl": "."
  }
}

@@ -0,0 +1,34 @@
using System;
using System.Collections.Immutable;

namespace StellaOps.Excititor.Core.Observations;

/// <summary>
/// Minimal observation reference used in linkset updates while preserving Aggregation-Only semantics.
/// </summary>
public sealed record VexLinksetObservationRefCore(
    string ObservationId,
    string ProviderId,
    string Status,
    double? Confidence,
    ImmutableDictionary<string, string> Attributes)
{
    public static VexLinksetObservationRefCore Create(
        string observationId,
        string providerId,
        string status,
        double? confidence,
        ImmutableDictionary<string, string>? attributes = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(observationId);
        ArgumentException.ThrowIfNullOrWhiteSpace(providerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(status);

        return new VexLinksetObservationRefCore(
            observationId.Trim(),
            providerId.Trim(),
            status.Trim(),
            confidence,
            attributes ?? ImmutableDictionary<string, string>.Empty);
    }
}

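An illustrative use of the factory above (not part of the diff): inputs are validated and trimmed, and omitted attributes collapse to an empty immutable map.

```csharp
// Constructing a normalized observation reference via the Create factory.
var obsRef = VexLinksetObservationRefCore.Create(
    observationId: " obs-123 ",   // stored as "obs-123" after trimming
    providerId: "vendor-x",
    status: "affected",
    confidence: 0.8);

// With no attributes supplied, obsRef.Attributes is
// ImmutableDictionary<string, string>.Empty rather than null.
```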
@@ -0,0 +1,98 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;
using StellaOps.Findings.Ledger.Infrastructure.Exports;
using StellaOps.Findings.Ledger.Infrastructure.InMemory;
using StellaOps.Findings.Ledger.Infrastructure.Merkle;
using StellaOps.Findings.Ledger.Services;
using Xunit;

namespace StellaOps.Findings.Ledger.Tests;

public sealed class AirgapAndOrchestratorServiceTests
{
    [Fact]
    public async Task AirgapImportService_AppendsLedgerEvent_AndPersistsRecord()
    {
        var ledgerRepo = new InMemoryLedgerEventRepository();
        var writeService = new LedgerEventWriteService(ledgerRepo, new NullMerkleAnchorScheduler(), NullLogger<LedgerEventWriteService>.Instance);
        var store = new InMemoryAirgapImportRepository();
        var service = new AirgapImportService(ledgerRepo, writeService, store, TimeProvider.System, NullLogger<AirgapImportService>.Instance);

        var input = new AirgapImportInput(
            TenantId: "tenant-a",
            BundleId: "bundle-123",
            MirrorGeneration: "gen-1",
            MerkleRoot: "abc123",
            TimeAnchor: DateTimeOffset.Parse("2025-10-10T00:00:00Z"),
            Publisher: "mirror",
            HashAlgorithm: "sha256",
            Contents: new[] { "c1", "c2" },
            ImportOperator: "operator:alice");

        var result = await service.RecordAsync(input, CancellationToken.None);

        Assert.True(result.Success);
        Assert.NotNull(result.LedgerEventId);
        Assert.NotNull(store.LastRecord);
        Assert.Equal(input.BundleId, store.LastRecord!.BundleId);
        Assert.Equal(input.MirrorGeneration, store.LastRecord.MirrorGeneration);
    }

    [Fact]
    public async Task OrchestratorExportService_ComputesMerkleRoot()
    {
        var repo = new InMemoryOrchestratorExportRepository();
        var service = new OrchestratorExportService(repo, TimeProvider.System, NullLogger<OrchestratorExportService>.Instance);
        var input = new OrchestratorExportInput(
            TenantId: "tenant-a",
            RunId: Guid.NewGuid(),
            JobType: "export-artifact",
            ArtifactHash: "sha256:artifact",
            PolicyHash: "sha256:policy",
            StartedAt: DateTimeOffset.Parse("2025-10-11T00:00:00Z"),
            CompletedAt: DateTimeOffset.Parse("2025-10-11T00:10:00Z"),
            Status: "succeeded",
            ManifestPath: "/exports/manifest.json",
            LogsPath: "/exports/logs.txt");

        var record = await service.RecordAsync(input, CancellationToken.None);

        Assert.NotNull(record);
        Assert.False(string.IsNullOrWhiteSpace(record.MerkleRoot));
        Assert.Equal(record.MerkleRoot, repo.LastRecord?.MerkleRoot);
        Assert.Equal(input.ArtifactHash, repo.LastRecord?.ArtifactHash);
    }

    private sealed class InMemoryAirgapImportRepository : IAirgapImportRepository
    {
        public AirgapImportRecord? LastRecord { get; private set; }

        public Task InsertAsync(AirgapImportRecord record, CancellationToken cancellationToken)
        {
            LastRecord = record;
            return Task.CompletedTask;
        }
    }

    private sealed class InMemoryOrchestratorExportRepository : IOrchestratorExportRepository
    {
        public OrchestratorExportRecord? LastRecord { get; private set; }

        public Task InsertAsync(OrchestratorExportRecord record, CancellationToken cancellationToken)
        {
            LastRecord = record;
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<OrchestratorExportRecord>> GetByArtifactAsync(string tenantId, string artifactHash, CancellationToken cancellationToken)
        {
            var list = new List<OrchestratorExportRecord>();
            if (LastRecord is not null && string.Equals(LastRecord.ArtifactHash, artifactHash, StringComparison.Ordinal))
            {
                list.Add(LastRecord);
            }

            return Task.FromResult<IReadOnlyList<OrchestratorExportRecord>>(list);
        }
    }
}

@@ -0,0 +1,37 @@
using System.Text.Json.Serialization;

namespace StellaOps.Findings.Ledger.WebService.Contracts;

public sealed record AirgapImportRequest
{
    [JsonPropertyName("bundleId")]
    public required string BundleId { get; init; }

    [JsonPropertyName("mirrorGeneration")]
    public string? MirrorGeneration { get; init; }

    [JsonPropertyName("merkleRoot")]
    public required string MerkleRoot { get; init; }

    [JsonPropertyName("timeAnchor")]
    public required DateTimeOffset TimeAnchor { get; init; }

    [JsonPropertyName("publisher")]
    public string? Publisher { get; init; }

    [JsonPropertyName("hashAlgorithm")]
    public string? HashAlgorithm { get; init; }

    [JsonPropertyName("contents")]
    public string[] Contents { get; init; } = Array.Empty<string>();

    [JsonPropertyName("importOperator")]
    public string? ImportOperator { get; init; }
}

public sealed record AirgapImportResponse(
    Guid ChainId,
    long? Sequence,
    Guid? LedgerEventId,
    string Status,
    string? Error);

@@ -0,0 +1,37 @@
using System.Text.Json.Serialization;

namespace StellaOps.Findings.Ledger.WebService.Contracts;

public sealed record OrchestratorExportRequest
{
    [JsonPropertyName("runId")]
    public required Guid RunId { get; init; }

    [JsonPropertyName("jobType")]
    public required string JobType { get; init; }

    [JsonPropertyName("artifactHash")]
    public required string ArtifactHash { get; init; }

    [JsonPropertyName("policyHash")]
    public required string PolicyHash { get; init; }

    [JsonPropertyName("startedAt")]
    public required DateTimeOffset StartedAt { get; init; }

    [JsonPropertyName("completedAt")]
    public DateTimeOffset? CompletedAt { get; init; }

    [JsonPropertyName("status")]
    public required string Status { get; init; }

    [JsonPropertyName("manifestPath")]
    public string? ManifestPath { get; init; }

    [JsonPropertyName("logsPath")]
    public string? LogsPath { get; init; }
}

public sealed record OrchestratorExportResponse(
    Guid RunId,
    string MerkleRoot);

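A hedged client-side sketch (not part of the diff) showing how a caller might post this contract to the internal endpoint wired up below; the base address is a placeholder and authentication is assumed to be configured elsewhere:

```csharp
// Recording an orchestrator export against the internal ledger endpoint.
// The tenant travels in the X-Stella-Tenant header, matching the endpoint's
// header check; expects 202 Accepted with an OrchestratorExportResponse body.
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("https://ledger.internal.example") };
client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-a");

var request = new OrchestratorExportRequest
{
    RunId = Guid.NewGuid(),
    JobType = "export-artifact",
    ArtifactHash = "sha256:artifact",
    PolicyHash = "sha256:policy",
    StartedAt = DateTimeOffset.UtcNow,
    Status = "succeeded",
};

var response = await client.PostAsJsonAsync("/internal/ledger/orchestrator-export", request);
response.EnsureSuccessStatusCode(); // 202 Accepted on success
```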
@@ -12,6 +12,7 @@ using StellaOps.Configuration;
using StellaOps.DependencyInjection;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;
using StellaOps.Findings.Ledger.Infrastructure.Merkle;
using StellaOps.Findings.Ledger.Infrastructure.Postgres;
using StellaOps.Findings.Ledger.Infrastructure.Projection;

@@ -140,6 +141,10 @@ builder.Services.AddSingleton<PolicyEngineEvaluationService>();
builder.Services.AddSingleton<IPolicyEvaluationService>(sp => sp.GetRequiredService<PolicyEngineEvaluationService>());
builder.Services.AddSingleton<ILedgerEventWriteService, LedgerEventWriteService>();
builder.Services.AddSingleton<IFindingWorkflowService, FindingWorkflowService>();
builder.Services.AddSingleton<IOrchestratorExportRepository, PostgresOrchestratorExportRepository>();
builder.Services.AddSingleton<OrchestratorExportService>();
builder.Services.AddSingleton<IAirgapImportRepository, PostgresAirgapImportRepository>();
builder.Services.AddSingleton<AirgapImportService>();
builder.Services.AddSingleton<IAttachmentEncryptionService, AttachmentEncryptionService>();
builder.Services.AddSingleton<IAttachmentUrlSigner, AttachmentUrlSigner>();
builder.Services.AddSingleton<IConsoleCsrfValidator, ConsoleCsrfValidator>();

@@ -300,6 +305,95 @@ app.MapGet("/ledger/export/sboms", () => TypedResults.Json(new ExportPage<SbomEx
    .RequireAuthorization(LedgerExportPolicy)
    .Produces(StatusCodes.Status200OK);

app.MapPost("/internal/ledger/orchestrator-export", async Task<Results<Accepted<OrchestratorExportResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    OrchestratorExportRequest request,
    OrchestratorExportService service,
    CancellationToken cancellationToken) =>
{
    if (!httpContext.Request.Headers.TryGetValue("X-Stella-Tenant", out var tenantValues) || string.IsNullOrWhiteSpace(tenantValues))
    {
        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_tenant");
    }

    var tenantId = tenantValues.ToString();
    var input = new OrchestratorExportInput(
        tenantId,
        request.RunId,
        request.JobType,
        request.ArtifactHash,
        request.PolicyHash,
        request.StartedAt,
        request.CompletedAt,
        request.Status,
        request.ManifestPath,
        request.LogsPath);

    var record = await service.RecordAsync(input, cancellationToken).ConfigureAwait(false);
    var response = new OrchestratorExportResponse(record.RunId, record.MerkleRoot);
    return TypedResults.Accepted($"/internal/ledger/orchestrator-export/{record.RunId}", response);
})
.WithName("OrchestratorExportRecord")
.RequireAuthorization(LedgerWritePolicy)
.Produces(StatusCodes.Status202Accepted)
.ProducesProblem(StatusCodes.Status400BadRequest);

app.MapGet("/internal/ledger/orchestrator-export/{artifactHash}", async Task<Results<JsonHttpResult<IReadOnlyList<OrchestratorExportRecord>>, ProblemHttpResult>> (
    HttpContext httpContext,
    string artifactHash,
    OrchestratorExportService service,
    CancellationToken cancellationToken) =>
{
    if (!httpContext.Request.Headers.TryGetValue("X-Stella-Tenant", out var tenantValues) || string.IsNullOrWhiteSpace(tenantValues))
    {
        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_tenant");
    }

    var records = await service.GetByArtifactAsync(tenantValues.ToString(), artifactHash, cancellationToken).ConfigureAwait(false);
    return TypedResults.Json(records);
})
.WithName("OrchestratorExportQuery")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);

app.MapPost("/internal/ledger/airgap-import", async Task<Results<Accepted<AirgapImportResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    AirgapImportRequest request,
    AirgapImportService service,
    CancellationToken cancellationToken) =>
{
    if (!httpContext.Request.Headers.TryGetValue("X-Stella-Tenant", out var tenantValues) || string.IsNullOrWhiteSpace(tenantValues))
    {
        return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_tenant");
    }

    var input = new AirgapImportInput(
        tenantValues.ToString(),
        request.BundleId,
        request.MirrorGeneration,
        request.MerkleRoot,
        request.TimeAnchor,
        request.Publisher,
        request.HashAlgorithm,
        request.Contents ?? Array.Empty<string>(),
        request.ImportOperator);

    var result = await service.RecordAsync(input, cancellationToken).ConfigureAwait(false);
    if (!result.Success)
    {
        return TypedResults.Problem(statusCode: StatusCodes.Status409Conflict, title: "airgap_import_failed", detail: result.Error ?? "Failed to record air-gap import.");
    }

    var response = new AirgapImportResponse(result.ChainId, result.SequenceNumber, result.LedgerEventId, "accepted", null);
    return TypedResults.Accepted($"/internal/ledger/airgap-import/{request.BundleId}", response);
})
.WithName("AirgapImportRecord")
.RequireAuthorization(LedgerWritePolicy)
.Produces(StatusCodes.Status202Accepted)
.ProducesProblem(StatusCodes.Status400BadRequest)
.ProducesProblem(StatusCodes.Status409Conflict);

app.Run();

static Created<LedgerEventResponse> CreateCreatedResponse(LedgerEventRecord record)

@@ -214,7 +214,7 @@ public sealed class AttestationQueryService
        sqlBuilder.Append(" LIMIT @take");
        parameters.Add(new NpgsqlParameter<int>("take", request.Limit + 1) { NpgsqlDbType = NpgsqlDbType.Integer });

-       await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, "attestation", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sqlBuilder.ToString(), connection)
        {
            CommandTimeout = _dataSource.CommandTimeoutSeconds

@@ -168,7 +168,7 @@ public sealed class ExportQueryService
            NpgsqlDbType = NpgsqlDbType.Integer
        });

-       await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, "export", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sqlBuilder.ToString(), connection)
        {
            CommandTimeout = _dataSource.CommandTimeoutSeconds

@@ -7,10 +7,15 @@ public static class LedgerChainIdGenerator
{
    public static Guid FromTenantPolicy(string tenantId, string policyVersion)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(policyVersion);
+       return FromTenantSubject(tenantId, policyVersion);
+   }

-       var normalized = $"{tenantId.Trim()}::{policyVersion.Trim()}";
+   public static Guid FromTenantSubject(string tenantId, string subject)
+   {
+       ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
+       ArgumentException.ThrowIfNullOrWhiteSpace(subject);
+
+       var normalized = $"{tenantId.Trim()}::{subject.Trim()}";
        var bytes = Encoding.UTF8.GetBytes(normalized);
        Span<byte> guidBytes = stackalloc byte[16];
        var hash = SHA256.HashData(bytes);
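// From what survives of this hunk, the chain id derivation is deterministic:
// SHA-256 over the normalized "tenant::subject" string, with the first 16 hash
// bytes presumably copied into guidBytes to form the Guid, so equal inputs always
// yield the same chain id (e.g. FromTenantSubject("t1", "airgap::bundle-1")).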

@@ -14,8 +14,24 @@ public static class LedgerEventConstants
    public const string EventFindingRemediationPlanAdded = "finding.remediation_plan_added";
    public const string EventFindingAttachmentAdded = "finding.attachment_added";
    public const string EventFindingClosed = "finding.closed";
    public const string EventAirgapBundleImported = "airgap.bundle_imported";
    public const string EventOrchestratorExportRecorded = "orchestrator.export_recorded";

    public static readonly ImmutableHashSet<string> SupportedEventTypes = ImmutableHashSet.Create(StringComparer.Ordinal,
        EventFindingCreated,
        EventFindingStatusChanged,
        EventFindingSeverityChanged,
        EventFindingTagUpdated,
        EventFindingCommentAdded,
        EventFindingAssignmentChanged,
        EventFindingAcceptedRisk,
        EventFindingRemediationPlanAdded,
        EventFindingAttachmentAdded,
        EventFindingClosed,
        EventAirgapBundleImported,
        EventOrchestratorExportRecorded);

    public static readonly ImmutableHashSet<string> FindingEventTypes = ImmutableHashSet.Create(StringComparer.Ordinal,
        EventFindingCreated,
        EventFindingStatusChanged,
        EventFindingSeverityChanged,

@@ -33,4 +49,6 @@ public static class LedgerEventConstants
        "integration");

    public const string EmptyHash = "0000000000000000000000000000000000000000000000000000000000000000";

    public static bool IsFindingEvent(string eventType) => FindingEventTypes.Contains(eventType);
}

@@ -8,6 +8,11 @@ public sealed record FindingProjection(
    string PolicyVersion,
    string Status,
    decimal? Severity,
    decimal? RiskScore,
    string? RiskSeverity,
    string? RiskProfileVersion,
    Guid? RiskExplanationId,
    long? RiskEventSequence,
    JsonObject Labels,
    Guid CurrentEventId,
    string? ExplainRef,

@@ -0,0 +1,16 @@
using System.Text.Json.Nodes;

namespace StellaOps.Findings.Ledger.Infrastructure.AirGap;

public sealed record AirgapImportRecord(
    string TenantId,
    string BundleId,
    string? MirrorGeneration,
    string MerkleRoot,
    DateTimeOffset TimeAnchor,
    string? Publisher,
    string? HashAlgorithm,
    JsonArray Contents,
    DateTimeOffset ImportedAt,
    string? ImportOperator,
    Guid? LedgerEventId);
@@ -0,0 +1,6 @@
namespace StellaOps.Findings.Ledger.Infrastructure.AirGap;

public interface IAirgapImportRepository
{
    Task InsertAsync(AirgapImportRecord record, CancellationToken cancellationToken);
}
@@ -0,0 +1,8 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Exports;

public interface IOrchestratorExportRepository
{
    Task InsertAsync(OrchestratorExportRecord record, CancellationToken cancellationToken);

    Task<IReadOnlyList<OrchestratorExportRecord>> GetByArtifactAsync(string tenantId, string artifactHash, CancellationToken cancellationToken);
}
@@ -0,0 +1,15 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Exports;

public sealed record OrchestratorExportRecord(
    string TenantId,
    Guid RunId,
    string JobType,
    string ArtifactHash,
    string PolicyHash,
    DateTimeOffset StartedAt,
    DateTimeOffset? CompletedAt,
    string Status,
    string? ManifestPath,
    string? LogsPath,
    string MerkleRoot,
    DateTimeOffset CreatedAt);
@@ -1,5 +1,6 @@
using System.Threading.Channels;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Observability;

namespace StellaOps.Findings.Ledger.Infrastructure.Merkle;

@@ -18,7 +19,11 @@ public sealed class LedgerAnchorQueue
    }

    public ValueTask EnqueueAsync(LedgerEventRecord record, CancellationToken cancellationToken)
-       => _channel.Writer.WriteAsync(record, cancellationToken);
+   {
+       var writeTask = _channel.Writer.WriteAsync(record, cancellationToken);
+       LedgerMetrics.IncrementBacklog();
+       return writeTask;
+   }

    public IAsyncEnumerable<LedgerEventRecord> ReadAllAsync(CancellationToken cancellationToken)
        => _channel.Reader.ReadAllAsync(cancellationToken);
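    // Assuming the channel accepts writes synchronously (e.g. it is unbounded), the
    // gauge incremented in EnqueueAsync counts events enqueued but not yet anchored;
    // the Merkle anchor worker decrements it as it drains ReadAllAsync.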

@@ -1,8 +1,10 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Observability;
using StellaOps.Findings.Ledger.Options;
using TimeProvider = System.TimeProvider;

@@ -35,6 +37,7 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
    {
        await foreach (var record in _queue.ReadAllAsync(stoppingToken))
        {
            LedgerMetrics.DecrementBacklog();
            await HandleEventAsync(record, stoppingToken).ConfigureAwait(false);
        }
    }
@@ -80,6 +83,7 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService

        try
        {
            var stopwatch = Stopwatch.StartNew();
            var orderedEvents = batch.Events
                .OrderBy(e => e.SequenceNumber)
                .ThenBy(e => e.RecordedAt)

@@ -106,10 +110,13 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
                anchoredAt,
                anchorReference: null,
                cancellationToken).ConfigureAwait(false);
            stopwatch.Stop();
            LedgerMetrics.RecordMerkleAnchorDuration(stopwatch.Elapsed, tenantId, leafCount);
        }
        catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
        {
            _logger.LogError(ex, "Failed to persist Merkle anchor for tenant {TenantId}.", tenantId);
            LedgerMetrics.RecordMerkleAnchorFailure(tenantId, ex.GetType().Name);
        }
    }

@@ -1,6 +1,8 @@
using System.Data;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Npgsql;
using StellaOps.Findings.Ledger.Observability;
using StellaOps.Findings.Ledger.Options;

namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;

@@ -31,15 +33,26 @@ public sealed class LedgerDataSource : IAsyncDisposable
    }

    public Task<NpgsqlConnection> OpenConnectionAsync(string tenantId, CancellationToken cancellationToken)
-       => OpenConnectionInternalAsync(tenantId, cancellationToken);
+       => OpenConnectionInternalAsync(tenantId, "unspecified", cancellationToken);

-   private async Task<NpgsqlConnection> OpenConnectionInternalAsync(string tenantId, CancellationToken cancellationToken)
+   public Task<NpgsqlConnection> OpenConnectionAsync(string tenantId, string role, CancellationToken cancellationToken)
+       => OpenConnectionInternalAsync(tenantId, role, cancellationToken);
+
+   private async Task<NpgsqlConnection> OpenConnectionInternalAsync(string tenantId, string role, CancellationToken cancellationToken)
    {
        var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            await ConfigureSessionAsync(connection, tenantId, cancellationToken).ConfigureAwait(false);
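            // Reading of the hunk below: ConnectionOpened plus the StateChange hook
            // keep the role-tagged ledger_db_connections_active gauge balanced, with an
            // increment on successful session setup and a decrement once the pooled
            // connection reports Closed.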
            LedgerMetrics.ConnectionOpened(role);
            connection.StateChange += (_, args) =>
            {
                if (args.CurrentState == ConnectionState.Closed)
                {
                    LedgerMetrics.ConnectionClosed(role);
                }
            };
        }
        catch
        {

@@ -0,0 +1,94 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;

namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;

public sealed class PostgresAirgapImportRepository : IAirgapImportRepository
{
    private const string InsertSql = """
        INSERT INTO airgap_imports (
            tenant_id,
            bundle_id,
            mirror_generation,
            merkle_root,
            time_anchor,
            publisher,
            hash_algorithm,
            contents,
            imported_at,
            import_operator,
            ledger_event_id)
        VALUES (
            @tenant_id,
            @bundle_id,
            @mirror_generation,
            @merkle_root,
            @time_anchor,
            @publisher,
            @hash_algorithm,
            @contents,
            @imported_at,
            @import_operator,
            @ledger_event_id)
        ON CONFLICT (tenant_id, bundle_id, time_anchor)
        DO UPDATE SET
            merkle_root = EXCLUDED.merkle_root,
            publisher = EXCLUDED.publisher,
            hash_algorithm = EXCLUDED.hash_algorithm,
            contents = EXCLUDED.contents,
            imported_at = EXCLUDED.imported_at,
            import_operator = EXCLUDED.import_operator,
            ledger_event_id = EXCLUDED.ledger_event_id;
        """;

    private readonly LedgerDataSource _dataSource;
    private readonly ILogger<PostgresAirgapImportRepository> _logger;

    public PostgresAirgapImportRepository(
        LedgerDataSource dataSource,
        ILogger<PostgresAirgapImportRepository> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task InsertAsync(AirgapImportRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        var canonicalContents = LedgerCanonicalJsonSerializer.Canonicalize(record.Contents);
        var contentsJson = canonicalContents.ToJsonString();

        await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "airgap-import", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(InsertSql, connection)
        {
            CommandTimeout = _dataSource.CommandTimeoutSeconds
        };

        command.Parameters.Add(new NpgsqlParameter<string>("tenant_id", record.TenantId) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string>("bundle_id", record.BundleId) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string?>("mirror_generation", record.MirrorGeneration) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string>("merkle_root", record.MerkleRoot) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<DateTimeOffset>("time_anchor", record.TimeAnchor) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
        command.Parameters.Add(new NpgsqlParameter<string?>("publisher", record.Publisher) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string?>("hash_algorithm", record.HashAlgorithm) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string>("contents", contentsJson) { NpgsqlDbType = NpgsqlDbType.Jsonb });
        command.Parameters.Add(new NpgsqlParameter<DateTimeOffset>("imported_at", record.ImportedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
        command.Parameters.Add(new NpgsqlParameter<string?>("import_operator", record.ImportOperator) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<Guid?>("ledger_event_id", record.LedgerEventId) { NpgsqlDbType = NpgsqlDbType.Uuid });

        try
        {
            await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        }
        catch (PostgresException ex)
        {
            _logger.LogError(ex, "Failed to insert air-gap import for tenant {TenantId} bundle {BundleId}.", record.TenantId, record.BundleId);
            throw;
        }
    }
}
@@ -12,6 +12,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
    private const string GetProjectionSql = """
        SELECT status,
               severity,
               risk_score,
               risk_severity,
               risk_profile_version,
               risk_explanation_id,
               risk_event_sequence,
               labels,
               current_event_id,
               explain_ref,

@@ -31,6 +36,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
            policy_version,
            status,
            severity,
            risk_score,
            risk_severity,
            risk_profile_version,
            risk_explanation_id,
            risk_event_sequence,
            labels,
            current_event_id,
            explain_ref,

@@ -43,6 +53,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
            @policy_version,
            @status,
            @severity,
            @risk_score,
            @risk_severity,
            @risk_profile_version,
            @risk_explanation_id,
            @risk_event_sequence,
            @labels,
            @current_event_id,
            @explain_ref,

@@ -53,6 +68,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
        DO UPDATE SET
            status = EXCLUDED.status,
            severity = EXCLUDED.severity,
            risk_score = EXCLUDED.risk_score,
            risk_severity = EXCLUDED.risk_severity,
            risk_profile_version = EXCLUDED.risk_profile_version,
            risk_explanation_id = EXCLUDED.risk_explanation_id,
            risk_event_sequence = EXCLUDED.risk_event_sequence,
            labels = EXCLUDED.labels,
            current_event_id = EXCLUDED.current_event_id,
            explain_ref = EXCLUDED.explain_ref,
@@ -153,7 +173,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo

    public async Task<FindingProjection?> GetAsync(string tenantId, string findingId, string policyVersion, CancellationToken cancellationToken)
    {
-       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "projector", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(GetProjectionSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("tenant_id", tenantId);

@@ -168,11 +188,16 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo

        var status = reader.GetString(0);
        var severity = reader.IsDBNull(1) ? (decimal?)null : reader.GetDecimal(1);
-       var labelsJson = reader.GetFieldValue<string>(2);
+       var riskScore = reader.IsDBNull(2) ? (decimal?)null : reader.GetDecimal(2);
+       var riskSeverity = reader.IsDBNull(3) ? null : reader.GetString(3);
+       var riskProfileVersion = reader.IsDBNull(4) ? null : reader.GetString(4);
+       var riskExplanationId = reader.IsDBNull(5) ? (Guid?)null : reader.GetGuid(5);
+       var riskEventSequence = reader.IsDBNull(6) ? (long?)null : reader.GetInt64(6);
+       var labelsJson = reader.GetFieldValue<string>(7);
        var labels = JsonNode.Parse(labelsJson)?.AsObject() ?? new JsonObject();
-       var currentEventId = reader.GetGuid(3);
-       var explainRef = reader.IsDBNull(4) ? null : reader.GetString(4);
-       var rationaleJson = reader.IsDBNull(5) ? string.Empty : reader.GetFieldValue<string>(5);
+       var currentEventId = reader.GetGuid(8);
+       var explainRef = reader.IsDBNull(9) ? null : reader.GetString(9);
+       var rationaleJson = reader.IsDBNull(10) ? string.Empty : reader.GetFieldValue<string>(10);
        JsonArray rationale;
        if (string.IsNullOrWhiteSpace(rationaleJson))
        {

@@ -182,8 +207,8 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
        {
            rationale = JsonNode.Parse(rationaleJson) as JsonArray ?? new JsonArray();
        }
-       var updatedAt = reader.GetFieldValue<DateTimeOffset>(6);
-       var cycleHash = reader.GetString(7);
+       var updatedAt = reader.GetFieldValue<DateTimeOffset>(11);
+       var cycleHash = reader.GetString(12);
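        // The ordinals shift because the SELECT now returns five risk_* columns between
        // severity and labels: labels moves 2 to 7, current_event_id 3 to 8, explain_ref
        // 4 to 9, rationale 5 to 10, updated_at 6 to 11, and cycle_hash 7 to 12.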

        return new FindingProjection(
            tenantId,

@@ -191,6 +216,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
            policyVersion,
            status,
            severity,
            riskScore,
            riskSeverity,
            riskProfileVersion,
            riskExplanationId,
            riskEventSequence,
            labels,
            currentEventId,
            explainRef,

@@ -203,7 +233,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
    {
        ArgumentNullException.ThrowIfNull(projection);

-       await using var connection = await _dataSource.OpenConnectionAsync(projection.TenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(projection.TenantId, "projector", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(UpsertProjectionSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;

@@ -212,6 +242,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
        command.Parameters.AddWithValue("policy_version", projection.PolicyVersion);
        command.Parameters.AddWithValue("status", projection.Status);
        command.Parameters.AddWithValue("severity", projection.Severity.HasValue ? projection.Severity.Value : (object)DBNull.Value);
        command.Parameters.AddWithValue("risk_score", projection.RiskScore.HasValue ? projection.RiskScore.Value : (object)DBNull.Value);
        command.Parameters.AddWithValue("risk_severity", projection.RiskSeverity ?? (object)DBNull.Value);
        command.Parameters.AddWithValue("risk_profile_version", projection.RiskProfileVersion ?? (object)DBNull.Value);
        command.Parameters.AddWithValue("risk_explanation_id", projection.RiskExplanationId ?? (object)DBNull.Value);
        command.Parameters.AddWithValue("risk_event_sequence", projection.RiskEventSequence.HasValue ? projection.RiskEventSequence.Value : (object)DBNull.Value);

        var labelsCanonical = LedgerCanonicalJsonSerializer.Canonicalize(projection.Labels);
        var labelsJson = labelsCanonical.ToJsonString();

@@ -233,7 +268,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
    {
        ArgumentNullException.ThrowIfNull(entry);

-       await using var connection = await _dataSource.OpenConnectionAsync(entry.TenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(entry.TenantId, "projector", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(InsertHistorySql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;

@@ -254,7 +289,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
    {
        ArgumentNullException.ThrowIfNull(entry);

-       await using var connection = await _dataSource.OpenConnectionAsync(entry.TenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(entry.TenantId, "projector", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(InsertActionSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;

@@ -275,7 +310,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo

    public async Task<ProjectionCheckpoint> GetCheckpointAsync(CancellationToken cancellationToken)
    {
-       await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, "projector", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(SelectCheckpointSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("worker_id", DefaultWorkerId);

@@ -296,7 +331,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
    {
        ArgumentNullException.ThrowIfNull(checkpoint);

-       await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, "projector", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(UpsertCheckpointSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;

@@ -96,7 +96,7 @@ public sealed class PostgresLedgerEventRepository : ILedgerEventRepository

    public async Task<LedgerEventRecord?> GetByEventIdAsync(string tenantId, Guid eventId, CancellationToken cancellationToken)
    {
-       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer-read", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(SelectByEventIdSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("tenant_id", tenantId);

@@ -113,7 +113,7 @@ public sealed class PostgresLedgerEventRepository : ILedgerEventRepository

    public async Task<LedgerChainHead?> GetChainHeadAsync(string tenantId, Guid chainId, CancellationToken cancellationToken)
    {
-       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer-read", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(SelectChainHeadSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("tenant_id", tenantId);

@@ -133,7 +133,7 @@ public sealed class PostgresLedgerEventRepository : ILedgerEventRepository

    public async Task AppendAsync(LedgerEventRecord record, CancellationToken cancellationToken)
    {
-       await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "writer", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(InsertEventSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;

@@ -236,7 +236,7 @@ public sealed class PostgresLedgerEventRepository : ILedgerEventRepository
            ORDER BY recorded_at DESC
            """;

-       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer-read", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("tenant_id", tenantId);

@@ -57,7 +57,7 @@ public sealed class PostgresLedgerEventStream : ILedgerEventStream

        var records = new List<LedgerEventRecord>(batchSize);

-       await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, "projector", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(ReadEventsSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("last_recorded_at", checkpoint.LastRecordedAt);

@@ -55,7 +55,7 @@ public sealed class PostgresMerkleAnchorRepository : IMerkleAnchorRepository
        string? anchorReference,
        CancellationToken cancellationToken)
    {
-       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
+       await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "anchor", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(InsertAnchorSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;

@@ -0,0 +1,146 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Findings.Ledger.Infrastructure.Exports;

namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;

public sealed class PostgresOrchestratorExportRepository : IOrchestratorExportRepository
{
    private const string UpsertSql = """
        INSERT INTO orchestrator_exports (
            tenant_id,
            run_id,
            job_type,
            artifact_hash,
            policy_hash,
            started_at,
            completed_at,
            status,
            manifest_path,
            logs_path,
            merkle_root,
            created_at)
        VALUES (
            @tenant_id,
            @run_id,
            @job_type,
            @artifact_hash,
            @policy_hash,
            @started_at,
            @completed_at,
            @status,
            @manifest_path,
            @logs_path,
            @merkle_root,
            @created_at)
        ON CONFLICT (tenant_id, run_id)
        DO UPDATE SET
            job_type = EXCLUDED.job_type,
            artifact_hash = EXCLUDED.artifact_hash,
            policy_hash = EXCLUDED.policy_hash,
            started_at = EXCLUDED.started_at,
            completed_at = EXCLUDED.completed_at,
            status = EXCLUDED.status,
            manifest_path = EXCLUDED.manifest_path,
            logs_path = EXCLUDED.logs_path,
            merkle_root = EXCLUDED.merkle_root,
            created_at = EXCLUDED.created_at;
        """;

    private const string SelectByArtifactSql = """
        SELECT run_id,
               job_type,
               artifact_hash,
               policy_hash,
               started_at,
               completed_at,
               status,
               manifest_path,
               logs_path,
               merkle_root,
               created_at
        FROM orchestrator_exports
        WHERE tenant_id = @tenant_id
          AND artifact_hash = @artifact_hash
        ORDER BY completed_at DESC NULLS LAST, started_at DESC;
        """;

    private readonly LedgerDataSource _dataSource;
    private readonly ILogger<PostgresOrchestratorExportRepository> _logger;

    public PostgresOrchestratorExportRepository(
        LedgerDataSource dataSource,
        ILogger<PostgresOrchestratorExportRepository> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task InsertAsync(OrchestratorExportRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "orchestrator-export", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(UpsertSql, connection)
        {
            CommandTimeout = _dataSource.CommandTimeoutSeconds
        };

        command.Parameters.Add(new NpgsqlParameter<string>("tenant_id", record.TenantId) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<Guid>("run_id", record.RunId) { NpgsqlDbType = NpgsqlDbType.Uuid });
        command.Parameters.Add(new NpgsqlParameter<string>("job_type", record.JobType) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string>("artifact_hash", record.ArtifactHash) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string>("policy_hash", record.PolicyHash) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<DateTimeOffset>("started_at", record.StartedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
        command.Parameters.Add(new NpgsqlParameter<DateTimeOffset?>("completed_at", record.CompletedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
        command.Parameters.Add(new NpgsqlParameter<string>("status", record.Status) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string?>("manifest_path", record.ManifestPath) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string?>("logs_path", record.LogsPath) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string>("merkle_root", record.MerkleRoot) { NpgsqlDbType = NpgsqlDbType.Char });
        command.Parameters.Add(new NpgsqlParameter<DateTimeOffset>("created_at", record.CreatedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });

        try
        {
            await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        }
        catch (PostgresException ex)
        {
            _logger.LogError(ex, "Failed to upsert orchestrator export for tenant {TenantId} run {RunId}.", record.TenantId, record.RunId);
            throw;
        }
    }

    public async Task<IReadOnlyList<OrchestratorExportRecord>> GetByArtifactAsync(string tenantId, string artifactHash, CancellationToken cancellationToken)
    {
        var results = new List<OrchestratorExportRecord>();

        await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "orchestrator-export", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(SelectByArtifactSql, connection)
        {
            CommandTimeout = _dataSource.CommandTimeoutSeconds
        };
        command.Parameters.Add(new NpgsqlParameter<string>("tenant_id", tenantId) { NpgsqlDbType = NpgsqlDbType.Text });
        command.Parameters.Add(new NpgsqlParameter<string>("artifact_hash", artifactHash) { NpgsqlDbType = NpgsqlDbType.Text });

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(new OrchestratorExportRecord(
                TenantId: tenantId,
                RunId: reader.GetGuid(0),
                JobType: reader.GetString(1),
                ArtifactHash: reader.GetString(2),
                PolicyHash: reader.GetString(3),
                StartedAt: reader.GetFieldValue<DateTimeOffset>(4),
                CompletedAt: reader.IsDBNull(5) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(5),
                Status: reader.GetString(6),
                ManifestPath: reader.IsDBNull(7) ? null : reader.GetString(7),
                LogsPath: reader.IsDBNull(8) ? null : reader.GetString(8),
                MerkleRoot: reader.GetString(9),
                CreatedAt: reader.GetFieldValue<DateTimeOffset>(10)));
        }

        return results;
    }
}
@@ -74,6 +74,10 @@ public sealed class LedgerProjectionWorker : BackgroundService
                continue;
            }

            var batchStopwatch = Stopwatch.StartNew();
            var batchTenant = batch[0].TenantId;
            var batchFailed = false;

            foreach (var record in batch)
            {
                using var scope = _logger.BeginScope(new Dictionary<string, object?>

@@ -86,6 +90,19 @@ public sealed class LedgerProjectionWorker : BackgroundService
                });
                using var activity = LedgerTelemetry.StartProjectionApply(record);
                var applyStopwatch = Stopwatch.StartNew();
                if (!LedgerEventConstants.IsFindingEvent(record.EventType))
                {
                    checkpoint = checkpoint with
                    {
                        LastRecordedAt = record.RecordedAt,
                        LastEventId = record.EventId,
                        UpdatedAt = _timeProvider.GetUtcNow()
                    };

                    await _repository.SaveCheckpointAsync(checkpoint, stoppingToken).ConfigureAwait(false);
                    _logger.LogInformation("Skipped non-finding ledger event {EventId} type {EventType} during projection.", record.EventId, record.EventType);
                    continue;
                }
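                // Checkpointing before `continue` is what appears to keep non-finding
                // events (air-gap imports, orchestrator exports) from being re-read on
                // every poll even though they never touch a finding projection.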
                string? evaluationStatus = null;

                try

@@ -131,10 +148,17 @@ public sealed class LedgerProjectionWorker : BackgroundService
                {
                    LedgerTelemetry.MarkError(activity, "projection_failed");
                    _logger.LogError(ex, "Failed to project ledger event {EventId} for tenant {TenantId}.", record.EventId, record.TenantId);
                    batchFailed = true;
                    await DelayAsync(stoppingToken).ConfigureAwait(false);
                    break;
                }
            }

            batchStopwatch.Stop();
            if (!batchFailed)
            {
                LedgerMetrics.RecordProjectionRebuild(batchStopwatch.Elapsed, batchTenant, "replay");
            }
        }
    }

@@ -1,3 +1,4 @@
using System.Collections.Concurrent;
using System.Diagnostics.Metrics;

namespace StellaOps.Findings.Ledger.Observability;

@@ -6,10 +7,16 @@ internal static class LedgerMetrics
{
    private static readonly Meter Meter = new("StellaOps.Findings.Ledger");

    private static readonly Histogram<double> WriteDurationSeconds = Meter.CreateHistogram<double>(
        "ledger_write_duration_seconds",
        unit: "s",
        description: "Latency of successful ledger append operations.");

    // Compatibility with earlier drafts
    private static readonly Histogram<double> WriteLatencySeconds = Meter.CreateHistogram<double>(
        "ledger_write_latency_seconds",
        unit: "s",
-       description: "Latency of successful ledger append operations.");
+       description: "Deprecated alias for ledger_write_duration_seconds.");

    private static readonly Counter<long> EventsTotal = Meter.CreateCounter<long>(
        "ledger_events_total",

@@ -20,15 +27,40 @@ internal static class LedgerMetrics
        unit: "s",
        description: "Duration to apply a ledger event to the finding projection.");

-   private static readonly Histogram<double> ProjectionLagSeconds = Meter.CreateHistogram<double>(
-       "ledger_projection_lag_seconds",
+   private static readonly Histogram<double> ProjectionRebuildSeconds = Meter.CreateHistogram<double>(
+       "ledger_projection_rebuild_seconds",
        unit: "s",
-       description: "Lag between ledger recorded_at and projection application time.");
+       description: "Duration of projection replay/rebuild batches.");

    private static readonly Counter<long> ProjectionEventsTotal = Meter.CreateCounter<long>(
        "ledger_projection_events_total",
        description: "Number of ledger events applied to projections.");

    private static readonly Histogram<double> MerkleAnchorDurationSeconds = Meter.CreateHistogram<double>(
        "ledger_merkle_anchor_duration_seconds",
        unit: "s",
        description: "Duration to persist Merkle anchor batches.");

    private static readonly Counter<long> MerkleAnchorFailures = Meter.CreateCounter<long>(
        "ledger_merkle_anchor_failures_total",
        description: "Count of Merkle anchor failures by reason.");

    private static readonly ObservableGauge<double> ProjectionLagGauge =
        Meter.CreateObservableGauge("ledger_projection_lag_seconds", ObserveProjectionLag, unit: "s",
            description: "Lag between ledger recorded_at and projection application time.");

    private static readonly ObservableGauge<long> IngestBacklogGauge =
        Meter.CreateObservableGauge("ledger_ingest_backlog_events", ObserveBacklog,
            description: "Number of events buffered for ingestion/anchoring.");

    private static readonly ObservableGauge<long> DbConnectionsGauge =
        Meter.CreateObservableGauge("ledger_db_connections_active", ObserveDbConnections,
            description: "Active PostgreSQL connections by role.");

    private static readonly ConcurrentDictionary<string, double> ProjectionLagByTenant = new(StringComparer.Ordinal);
    private static readonly ConcurrentDictionary<string, long> DbConnectionsByRole = new(StringComparer.OrdinalIgnoreCase);
    private static long _ingestBacklog;

    public static void RecordWriteSuccess(TimeSpan duration, string? tenantId, string? eventType, string? source)
    {
        var tags = new KeyValuePair<string, object?>[]

@@ -38,6 +70,7 @@ internal static class LedgerMetrics
            new("source", source ?? string.Empty)
        };

        WriteDurationSeconds.Record(duration.TotalSeconds, tags);
        WriteLatencySeconds.Record(duration.TotalSeconds, tags);
        EventsTotal.Add(1, tags);
    }

@@ -59,7 +92,90 @@ internal static class LedgerMetrics
        };

        ProjectionApplySeconds.Record(duration.TotalSeconds, tags);
-       ProjectionLagSeconds.Record(lagSeconds, tags);
+       ProjectionEventsTotal.Add(1, tags);
+       UpdateProjectionLag(tenantId, lagSeconds);
    }

    public static void RecordProjectionRebuild(TimeSpan duration, string? tenantId, string scenario)
    {
        var tags = new KeyValuePair<string, object?>[]
        {
            new("tenant", tenantId ?? string.Empty),
            new("scenario", scenario)
        };

        ProjectionRebuildSeconds.Record(duration.TotalSeconds, tags);
    }

    public static void RecordMerkleAnchorDuration(TimeSpan duration, string tenantId, int leafCount)
    {
        var tags = new KeyValuePair<string, object?>[]
        {
            new("tenant", tenantId),
            new("leaf_count", leafCount)
        };
        MerkleAnchorDurationSeconds.Record(duration.TotalSeconds, tags);
    }

    public static void RecordMerkleAnchorFailure(string tenantId, string reason)
    {
        var tags = new KeyValuePair<string, object?>[]
        {
            new("tenant", tenantId),
            new("reason", reason)
        };
        MerkleAnchorFailures.Add(1, tags);
    }
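
    // The backlog helpers below look intentionally best-effort: Enqueue increments
    // before the channel write completes, and Decrement clamps at zero so transient
    // races cannot drive the ledger_ingest_backlog_events gauge negative.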
    public static void IncrementBacklog() => Interlocked.Increment(ref _ingestBacklog);

    public static void DecrementBacklog()
    {
        var value = Interlocked.Decrement(ref _ingestBacklog);
        if (value < 0)
        {
            Interlocked.Exchange(ref _ingestBacklog, 0);
        }
    }

    public static void ConnectionOpened(string role)
    {
        var normalized = NormalizeRole(role);
        DbConnectionsByRole.AddOrUpdate(normalized, _ => 1, (_, current) => current + 1);
    }

    public static void ConnectionClosed(string role)
    {
        var normalized = NormalizeRole(role);
        DbConnectionsByRole.AddOrUpdate(normalized, _ => 0, (_, current) => Math.Max(0, current - 1));
    }

    public static void UpdateProjectionLag(string? tenantId, double lagSeconds)
    {
        var key = string.IsNullOrWhiteSpace(tenantId) ? string.Empty : tenantId;
        ProjectionLagByTenant[key] = lagSeconds < 0 ? 0 : lagSeconds;
    }

    private static IEnumerable<Measurement<double>> ObserveProjectionLag()
    {
        foreach (var kvp in ProjectionLagByTenant)
        {
            yield return new Measurement<double>(kvp.Value, new KeyValuePair<string, object?>("tenant", kvp.Key));
        }
    }

    private static IEnumerable<Measurement<long>> ObserveBacklog()
    {
        yield return new Measurement<long>(Interlocked.Read(ref _ingestBacklog));
    }

    private static IEnumerable<Measurement<long>> ObserveDbConnections()
    {
        foreach (var kvp in DbConnectionsByRole)
        {
            yield return new Measurement<long>(kvp.Value, new KeyValuePair<string, object?>("role", kvp.Key));
        }
    }

    private static string NormalizeRole(string role) => string.IsNullOrWhiteSpace(role) ? "unspecified" : role.ToLowerInvariant();
}

@@ -1,6 +1,7 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure.Exports;

namespace StellaOps.Findings.Ledger.Observability;

@@ -12,6 +13,8 @@ internal static class LedgerTimeline
{
    private static readonly EventId LedgerAppended = new(6101, "ledger.event.appended");
    private static readonly EventId ProjectionUpdated = new(6201, "ledger.projection.updated");
    private static readonly EventId OrchestratorExport = new(6301, "ledger.export.recorded");
    private static readonly EventId AirgapImport = new(6401, "ledger.airgap.imported");

    public static void EmitLedgerAppended(ILogger logger, LedgerEventRecord record, string? evidenceBundleRef = null)
    {

@@ -62,4 +65,38 @@ internal static class LedgerTimeline
            traceId,
            evidenceBundleRef ?? record.EvidenceBundleReference ?? string.Empty);
    }

    public static void EmitOrchestratorExport(ILogger logger, OrchestratorExportRecord record)
    {
        if (logger is null)
        {
            return;
        }

        logger.LogInformation(
            OrchestratorExport,
            "timeline ledger.export.recorded tenant={Tenant} run={RunId} artifact={ArtifactHash} policy={PolicyHash} status={Status} merkle_root={MerkleRoot}",
            record.TenantId,
            record.RunId,
            record.ArtifactHash,
            record.PolicyHash,
            record.Status,
            record.MerkleRoot);
    }

    public static void EmitAirgapImport(ILogger logger, string tenantId, string bundleId, string merkleRoot, Guid? ledgerEventId)
    {
        if (logger is null)
        {
            return;
        }

        logger.LogInformation(
            AirgapImport,
            "timeline ledger.airgap.imported tenant={Tenant} bundle={BundleId} merkle_root={MerkleRoot} ledger_event={LedgerEvent}",
            tenantId,
            bundleId,
            merkleRoot,
            ledgerEventId?.ToString() ?? string.Empty);
    }
}

@@ -0,0 +1,152 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;
using StellaOps.Findings.Ledger.Observability;

namespace StellaOps.Findings.Ledger.Services;

public sealed record AirgapImportInput(
    string TenantId,
    string BundleId,
    string? MirrorGeneration,
    string MerkleRoot,
    DateTimeOffset TimeAnchor,
    string? Publisher,
    string? HashAlgorithm,
    IReadOnlyList<string> Contents,
    string? ImportOperator);

public sealed record AirgapImportResult(
    bool Success,
    Guid ChainId,
    long? SequenceNumber,
    Guid? LedgerEventId,
    string? Error);

public sealed class AirgapImportService
{
    private readonly ILedgerEventRepository _ledgerEventRepository;
    private readonly ILedgerEventWriteService _writeService;
    private readonly IAirgapImportRepository _repository;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<AirgapImportService> _logger;

    public AirgapImportService(
        ILedgerEventRepository ledgerEventRepository,
        ILedgerEventWriteService writeService,
        IAirgapImportRepository repository,
        TimeProvider timeProvider,
        ILogger<AirgapImportService> logger)
    {
        _ledgerEventRepository = ledgerEventRepository ?? throw new ArgumentNullException(nameof(ledgerEventRepository));
        _writeService = writeService ?? throw new ArgumentNullException(nameof(writeService));
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<AirgapImportResult> RecordAsync(AirgapImportInput input, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(input);

        var chainId = LedgerChainIdGenerator.FromTenantSubject(input.TenantId, $"airgap::{input.BundleId}");
        var chainHead = await _ledgerEventRepository.GetChainHeadAsync(input.TenantId, chainId, cancellationToken).ConfigureAwait(false);
        var sequence = (chainHead?.SequenceNumber ?? 0) + 1;
        var previousHash = chainHead?.EventHash ?? LedgerEventConstants.EmptyHash;

        var eventId = Guid.NewGuid();
        var recordedAt = _timeProvider.GetUtcNow();

        var payload = new JsonObject
        {
            ["airgap"] = new JsonObject
            {
                ["bundleId"] = input.BundleId,
                ["mirrorGeneration"] = input.MirrorGeneration,
                ["merkleRoot"] = input.MerkleRoot,
                ["timeAnchor"] = input.TimeAnchor.ToUniversalTime().ToString("O"),
                ["publisher"] = input.Publisher,
                ["hashAlgorithm"] = input.HashAlgorithm,
                ["contents"] = new JsonArray(input.Contents.Select(c => (JsonNode)c).ToArray())
            }
        };

        var envelope = new JsonObject
        {
            ["event"] = new JsonObject
            {
                ["id"] = eventId.ToString(),
                ["type"] = LedgerEventConstants.EventAirgapBundleImported,
                ["tenant"] = input.TenantId,
                ["chainId"] = chainId.ToString(),
                ["sequence"] = sequence,
                ["policyVersion"] = input.MirrorGeneration ?? "airgap-bundle",
                ["artifactId"] = input.BundleId,
                ["finding"] = new JsonObject
                {
                    ["id"] = input.BundleId,
                    ["artifactId"] = input.BundleId,
                    ["vulnId"] = "airgap-import"
                },
                ["actor"] = new JsonObject
                {
                    ["id"] = input.ImportOperator ?? "airgap-operator",
                    ["type"] = "operator"
                },
                ["occurredAt"] = FormatTimestamp(input.TimeAnchor),
                ["recordedAt"] = FormatTimestamp(recordedAt),
                ["payload"] = payload.DeepClone()
            }
        };

        var draft = new LedgerEventDraft(
            input.TenantId,
            chainId,
            sequence,
            eventId,
            LedgerEventConstants.EventAirgapBundleImported,
            input.MirrorGeneration ?? "airgap-bundle",
            input.BundleId,
            input.BundleId,
            SourceRunId: null,
            ActorId: input.ImportOperator ?? "airgap-operator",
            ActorType: "operator",
            OccurredAt: input.TimeAnchor.ToUniversalTime(),
            RecordedAt: recordedAt,
            Payload: payload,
            CanonicalEnvelope: envelope,
            ProvidedPreviousHash: previousHash);

        var writeResult = await _writeService.AppendAsync(draft, cancellationToken).ConfigureAwait(false);
        if (writeResult.Status is not (LedgerWriteStatus.Success or LedgerWriteStatus.Idempotent))
        {
            var error = string.Join(";", writeResult.Errors);
            return new AirgapImportResult(false, chainId, sequence, writeResult.Record?.EventId, error);
        }

        var ledgerEventId = writeResult.Record?.EventId;

        var record = new AirgapImportRecord(
            input.TenantId,
            input.BundleId,
            input.MirrorGeneration,
            input.MerkleRoot,
            input.TimeAnchor.ToUniversalTime(),
            input.Publisher,
            input.HashAlgorithm,
            new JsonArray(input.Contents.Select(c => (JsonNode)c).ToArray()),
            recordedAt,
            input.ImportOperator,
            ledgerEventId);

        await _repository.InsertAsync(record, cancellationToken).ConfigureAwait(false);
        LedgerTimeline.EmitAirgapImport(_logger, input.TenantId, input.BundleId, input.MerkleRoot, ledgerEventId);

        return new AirgapImportResult(true, chainId, sequence, ledgerEventId, null);
    }
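
    // Envelope timestamps are rendered at fixed millisecond precision in UTC via
    // FormatTimestamp below, presumably so the canonical envelope stays byte-stable
    // for hashing and idempotent replays.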
    private static string FormatTimestamp(DateTimeOffset value)
        => value.ToUniversalTime().ToString("yyyy-MM-dd'T'HH:mm:ss.fff'Z'");
}
@@ -22,6 +22,11 @@ public static class LedgerProjectionReducer

        var status = evaluation.Status ?? DetermineStatus(record.EventType, payload, current?.Status);
        var severity = evaluation.Severity ?? DetermineSeverity(payload, current?.Severity);
        var riskScore = evaluation.RiskScore ?? current?.RiskScore;
        var riskSeverity = evaluation.RiskSeverity ?? current?.RiskSeverity;
        var riskProfileVersion = evaluation.RiskProfileVersion ?? current?.RiskProfileVersion;
        var riskExplanationId = evaluation.RiskExplanationId ?? current?.RiskExplanationId;
        var riskEventSequence = evaluation.RiskEventSequence ?? current?.RiskEventSequence ?? record.SequenceNumber;

        var labels = CloneLabels(evaluation.Labels);
        MergeLabels(labels, payload);

@@ -41,6 +46,11 @@ public static class LedgerProjectionReducer
            record.PolicyVersion,
            status,
            severity,
            riskScore,
            riskSeverity,
            riskProfileVersion,
            riskExplanationId,
            riskEventSequence,
            labels,
            record.EventId,
            explainRef,

@@ -0,0 +1,86 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Infrastructure.Exports;
using StellaOps.Findings.Ledger.Observability;

namespace StellaOps.Findings.Ledger.Services;

public sealed record OrchestratorExportInput(
    string TenantId,
    Guid RunId,
    string JobType,
    string ArtifactHash,
    string PolicyHash,
    DateTimeOffset StartedAt,
    DateTimeOffset? CompletedAt,
    string Status,
    string? ManifestPath,
    string? LogsPath);

public sealed class OrchestratorExportService
{
    private readonly IOrchestratorExportRepository _repository;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<OrchestratorExportService> _logger;

    public OrchestratorExportService(
        IOrchestratorExportRepository repository,
        TimeProvider timeProvider,
        ILogger<OrchestratorExportService> logger)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<OrchestratorExportRecord> RecordAsync(OrchestratorExportInput input, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(input);

        var canonical = CreateCanonicalPayload(input);
        var merkleRoot = HashUtilities.ComputeSha256Hex(LedgerCanonicalJsonSerializer.Serialize(canonical));
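        // Note: the "merkle root" here appears to be a single SHA-256 over the
        // canonical JSON payload rather than a tree over many leaves, so identical
        // export inputs always reproduce the same root.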

        var record = new OrchestratorExportRecord(
            input.TenantId,
            input.RunId,
            input.JobType,
            input.ArtifactHash,
            input.PolicyHash,
            input.StartedAt.ToUniversalTime(),
            input.CompletedAt?.ToUniversalTime(),
            input.Status,
            input.ManifestPath,
            input.LogsPath,
            merkleRoot,
            _timeProvider.GetUtcNow());

        await _repository.InsertAsync(record, cancellationToken).ConfigureAwait(false);
        LedgerTimeline.EmitOrchestratorExport(_logger, record);
        return record;
    }

    public Task<IReadOnlyList<OrchestratorExportRecord>> GetByArtifactAsync(string tenantId, string artifactHash, CancellationToken cancellationToken)
    {
        return _repository.GetByArtifactAsync(tenantId, artifactHash, cancellationToken);
    }

    private static JsonObject CreateCanonicalPayload(OrchestratorExportInput input)
    {
        var payload = new JsonObject
        {
            ["tenantId"] = input.TenantId,
            ["runId"] = input.RunId.ToString(),
            ["jobType"] = input.JobType,
            ["artifactHash"] = input.ArtifactHash,
            ["policyHash"] = input.PolicyHash,
            ["startedAt"] = input.StartedAt.ToUniversalTime().ToString("O"),
            ["completedAt"] = input.CompletedAt?.ToUniversalTime().ToString("O"),
            ["status"] = input.Status,
            ["manifestPath"] = input.ManifestPath,
            ["logsPath"] = input.LogsPath
        };

        return LedgerCanonicalJsonSerializer.Canonicalize(payload);
    }
}
src/Findings/StellaOps.Findings.Ledger/TASKS.md · Normal file · +9
@@ -0,0 +1,9 @@
# Findings Ledger · Sprint 0120-0000-0001

| Task ID | Status | Notes | Updated (UTC) |
| --- | --- | --- | --- |
| LEDGER-29-008 | DOING | Determinism harness, metrics, replay tests | 2025-11-22 |
| LEDGER-34-101 | TODO | Orchestrator export linkage | 2025-11-22 |
| LEDGER-AIRGAP-56-001 | TODO | Mirror bundle provenance recording | 2025-11-22 |

Status changes must be mirrored in `docs/implplan/SPRINT_0120_0000_0001_policy_reasoning.md`.
@@ -0,0 +1,51 @@
-- 006_orchestrator_airgap.sql
-- Add orchestrator export provenance and air-gap import provenance tables (LEDGER-34-101, LEDGER-AIRGAP-56-001)

BEGIN;

CREATE TABLE IF NOT EXISTS orchestrator_exports
(
    tenant_id     TEXT        NOT NULL,
    run_id        UUID        NOT NULL,
    job_type      TEXT        NOT NULL,
    artifact_hash TEXT        NOT NULL,
    policy_hash   TEXT        NOT NULL,
    started_at    TIMESTAMPTZ NOT NULL,
    completed_at  TIMESTAMPTZ,
    status        TEXT        NOT NULL,
    manifest_path TEXT,
    logs_path     TEXT,
    merkle_root   CHAR(64)    NOT NULL,
    created_at    TIMESTAMPTZ NOT NULL,
    PRIMARY KEY (tenant_id, run_id)
);

CREATE UNIQUE INDEX IF NOT EXISTS ix_orchestrator_exports_artifact_run
    ON orchestrator_exports (tenant_id, artifact_hash, run_id);

CREATE INDEX IF NOT EXISTS ix_orchestrator_exports_artifact
    ON orchestrator_exports (tenant_id, artifact_hash);

CREATE TABLE IF NOT EXISTS airgap_imports
(
    tenant_id         TEXT        NOT NULL,
    bundle_id         TEXT        NOT NULL,
    mirror_generation TEXT,
    merkle_root       TEXT        NOT NULL,
    time_anchor       TIMESTAMPTZ NOT NULL,
    publisher         TEXT,
    hash_algorithm    TEXT,
    contents          JSONB,
    imported_at       TIMESTAMPTZ NOT NULL,
    import_operator   TEXT,
    ledger_event_id   UUID,
    PRIMARY KEY (tenant_id, bundle_id, time_anchor)
);

CREATE INDEX IF NOT EXISTS ix_airgap_imports_bundle
    ON airgap_imports (tenant_id, bundle_id);

CREATE INDEX IF NOT EXISTS ix_airgap_imports_event
    ON airgap_imports (tenant_id, ledger_event_id);

COMMIT;
|
||||
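For reference, the lookup that `ix_orchestrator_exports_artifact` is meant to serve might look like the following. This is an illustrative Npgsql sketch, not committed code: the method name, connection handling, and column subset are assumptions.

using System;
using System.Threading;
using System.Threading.Tasks;
using Npgsql;

// List export provenance rows for one artifact within a tenant.
static async Task ListExportsAsync(NpgsqlDataSource dataSource, string tenantId, string artifactHash, CancellationToken ct)
{
    const string sql = """
        SELECT run_id, status, merkle_root
        FROM orchestrator_exports
        WHERE tenant_id = $1 AND artifact_hash = $2
        ORDER BY created_at;
        """;

    await using var command = dataSource.CreateCommand(sql);
    command.Parameters.Add(new NpgsqlParameter { Value = tenantId });
    command.Parameters.Add(new NpgsqlParameter { Value = artifactHash });

    await using var reader = await command.ExecuteReaderAsync(ct);
    while (await reader.ReadAsync(ct))
    {
        Console.WriteLine($"{reader.GetGuid(0)} {reader.GetString(1)} {reader.GetString(2)}");
    }
}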
@@ -105,7 +105,8 @@ root.SetHandler(async (FileInfo[] fixtures, string connection, string tenant, in

        var verification = await VerifyLedgerAsync(scope.ServiceProvider, tenant, eventsWritten, cts.Token).ConfigureAwait(false);

        var writeLatencyP95Ms = Percentile(metrics.HistDouble("ledger_write_latency_seconds"), 95) * 1000;
        var writeDurations = metrics.HistDouble("ledger_write_duration_seconds").Concat(metrics.HistDouble("ledger_write_latency_seconds"));
        var writeLatencyP95Ms = Percentile(writeDurations, 95) * 1000;
        var rebuildP95Ms = Percentile(metrics.HistDouble("ledger_projection_rebuild_seconds"), 95) * 1000;
        var projectionLagSeconds = metrics.GaugeDouble("ledger_projection_lag_seconds").DefaultIfEmpty(0).Max();
        var backlogEvents = metrics.GaugeLong("ledger_ingest_backlog_events").DefaultIfEmpty(0).Max();

@@ -36,6 +36,11 @@ public sealed class InlinePolicyEvaluationServiceTests
            "policy-sha",
            "affected",
            7.1m,
            null,
            null,
            null,
            null,
            1,
            new JsonObject { ["deprecated"] = "true" },
            Guid.NewGuid(),
            null,
@@ -68,6 +73,11 @@ public sealed class InlinePolicyEvaluationServiceTests
            "policy-sha",
            "accepted_risk",
            3.4m,
            null,
            null,
            null,
            null,
            1,
            new JsonObject { ["runtime"] = "contained" },
            Guid.NewGuid(),
            "explain://existing",

@@ -32,6 +32,11 @@ public sealed class LedgerProjectionReducerTests
        var evaluation = new PolicyEvaluationResult(
            "triaged",
            6.5m,
            null,
            null,
            null,
            null,
            1,
            (JsonObject)payload["labels"]!.DeepClone(),
            payload["explainRef"]!.GetValue<string>(),
            new JsonArray(payload["explainRef"]!.GetValue<string>()));
@@ -62,6 +67,11 @@ public sealed class LedgerProjectionReducerTests
            "policy-v1",
            "affected",
            5.0m,
            null,
            null,
            null,
            null,
            1,
            new JsonObject(),
            Guid.NewGuid(),
            null,
@@ -82,6 +92,11 @@ public sealed class LedgerProjectionReducerTests
        var evaluation = new PolicyEvaluationResult(
            "accepted_risk",
            existing.Severity,
            null,
            null,
            null,
            null,
            existing.RiskEventSequence,
            (JsonObject)existing.Labels.DeepClone(),
            null,
            new JsonArray());
@@ -110,6 +125,11 @@ public sealed class LedgerProjectionReducerTests
            "policy-v1",
            "triaged",
            7.1m,
            null,
            null,
            null,
            null,
            1,
            labels,
            Guid.NewGuid(),
            null,
@@ -133,6 +153,11 @@ public sealed class LedgerProjectionReducerTests
        var evaluation = new PolicyEvaluationResult(
            "triaged",
            existing.Severity,
            null,
            null,
            null,
            null,
            existing.RiskEventSequence,
            (JsonObject)payload["labels"]!.DeepClone(),
            null,
            new JsonArray());
63
src/Graph/AGENTS.md
Normal file
@@ -0,0 +1,63 @@
# AGENTS · Graph Module

## Purpose & Scope
- Working directories: `src/Graph/StellaOps.Graph.Api`, `src/Graph/StellaOps.Graph.Indexer`, and `src/Graph/__Tests`.
- Modules covered: Graph API (query/search/paths/diff/overlay/export) and Graph Indexer (ingest, snapshot, overlays).
- Applicable sprints: `docs/implplan/SPRINT_0207_0001_0001_graph.md`, `docs/implplan/SPRINT_0141_0001_0001_graph_indexer.md`, and any follow-on graph docs sprints (`docs/implplan/SPRINT_0321_0001_0001_docs_modules_graph.md`).

## Roles
- Backend engineer (.NET 10) — API, planners, overlays, exports.
- Data/ETL engineer — Indexer ingest, snapshots, overlays.
- QA/Perf engineer — deterministic tests, load/fuzz, offline parity.
- Docs maintainer — graph API/ops runbooks, Offline Kit notes.

## Required Reading (treat as read before DOING)
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/graph/architecture.md`
- `docs/modules/graph/implementation_plan.md`
- Sprint doc for current work (e.g., `docs/implplan/SPRINT_0207_0001_0001_graph.md`).
- Policy overlay contract refs when touching overlays: `POLICY-ENGINE-30-001..003` (see policy module docs).

## Determinism & Offline
- Default to deterministic ordering for streams/exports; manifest checksums required for `graphml/csv/ndjson` exports.
- Timestamps: UTC ISO-8601; avoid wall-clock in tests.
- Snapshot/export roots configurable via `STELLAOPS_GRAPH_SNAPSHOT_DIR` or `SbomIngestOptions.SnapshotRootDirectory`.
- Offline posture: no external calls beyond allowlisted feeds; prefer cached schemas and local nugets in `local-nugets/`.

## Data & Environment
- Canonical store: MongoDB (>=3.0 driver). Tests use `STELLAOPS_TEST_MONGO_URI`; fallback `mongodb://127.0.0.1:27017`, then Mongo2Go (see the sketch below).
- Collections: `graph_nodes`, `graph_edges`, `graph_overlays_cache`, `graph_snapshots`, `graph_saved_queries`.
- Tenant isolation mandatory on every query and export.
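A minimal sketch of the documented connection fallback (illustrative only; the helper name and the Mongo2Go hand-off are assumptions, not a committed API):

```csharp
// Resolve the test Mongo URI per the chain above: env var -> localhost -> Mongo2Go.
static string ResolveTestMongoUri()
{
    var fromEnv = Environment.GetEnvironmentVariable("STELLAOPS_TEST_MONGO_URI");
    if (!string.IsNullOrWhiteSpace(fromEnv))
    {
        return fromEnv;
    }

    // If localhost is unreachable, the test fixture is expected to fall back to Mongo2Go.
    return "mongodb://127.0.0.1:27017";
}
```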
## Testing Expectations
- Unit: node/edge builders, identifier stability, overlay calculators, planners, diff engine.
- Integration: ingest → snapshot → query/paths/diff/export end-to-end; RBAC + tenant guards.
- Performance: synthetic datasets (~500k nodes / 2M edges) with enforced budgets; capture latency metrics.
- Security: RBAC scopes (`graph:read/query/export`), audit logging, rate limiting.
- Offline: export/import parity for Offline Kit bundles; deterministic manifests verified in tests.

## Observability
- Metrics to emit: `graph_ingest_lag_seconds`, `graph_tile_latency_seconds`, `graph_query_budget_denied_total`, `graph_overlay_cache_hit_ratio`, clustering counters from the architecture doc.
- Structured logs with trace IDs; traces for ingest stages and query planner/executor.

## Coding Standards
- Target framework: net10.0 with latest C# preview features.
- Use dependency injection; avoid static singletons.
- Respect module boundaries; shared libs only if declared in sprint or architecture docs.
- Naming: projects `StellaOps.Graph.Api`, `StellaOps.Graph.Indexer`; prefer `Graph*` prefixes for internal components.

## Coordination & Status
- Update sprint Delivery Tracker statuses (TODO → DOING → DONE/BLOCKED) in the relevant sprint file.
- If a required contract/doc is missing or stale, mark the affected task BLOCKED in the sprint and log it under Decisions & Risks; do not pause work waiting for live answers.

## Run/Test Commands (examples)
- Restore: `dotnet restore src/Graph/StellaOps.Graph.Api/StellaOps.Graph.Api.csproj --source ../local-nugets`
- Build: `dotnet build src/Graph/StellaOps.Graph.Api/StellaOps.Graph.Api.csproj -c Release`
- Tests: `dotnet test src/Graph/__Tests/StellaOps.Graph.Indexer.Tests/StellaOps.Graph.Indexer.Tests.csproj`
- Lint/style: follow repo-wide analyzers in `Directory.Build.props` / `.editorconfig`.

## Evidence
- Keep artefacts deterministic; attach manifest hashes in PR/sprint notes when delivering exports or snapshots.
- Document new metrics/routes/schemas under `docs/modules/graph` and link from sprint Decisions & Risks.
@@ -0,0 +1,471 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Nodes;
using StellaOps.Graph.Indexer.Documents;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed class GraphAnalyticsEngine
{
    private readonly GraphAnalyticsOptions _options;

    public GraphAnalyticsEngine(GraphAnalyticsOptions options)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        if (_options.MaxPropagationIterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(options.MaxPropagationIterations), "must be positive");
        }
    }

    public GraphAnalyticsResult Compute(GraphAnalyticsSnapshot snapshot)
    {
        ArgumentNullException.ThrowIfNull(snapshot);

        var topology = BuildTopology(snapshot);
        var clusters = ComputeClusters(topology);
        var centrality = ComputeCentrality(topology);

        return new GraphAnalyticsResult(clusters, centrality);
    }

    private GraphTopology BuildTopology(GraphAnalyticsSnapshot snapshot)
    {
        var nodes = snapshot.Nodes
            .Select(node => new GraphNode(node["id"]!.GetValue<string>(), node["kind"]!.GetValue<string>()))
            .ToImmutableArray();

        var nodeLookup = nodes.ToImmutableDictionary(n => n.NodeId, n => n.Kind, StringComparer.Ordinal);
        var adjacency = new Dictionary<string, HashSet<string>>(StringComparer.Ordinal);

        foreach (var node in nodes)
        {
            adjacency[node.NodeId] = new HashSet<string>(StringComparer.Ordinal);
        }

        var resolver = new EdgeEndpointResolver(snapshot.Nodes);
        foreach (var edge in snapshot.Edges)
        {
            if (!resolver.TryResolve(edge, out var source, out var target))
            {
                continue;
            }

            if (!adjacency.ContainsKey(source) || !adjacency.ContainsKey(target))
            {
                continue;
            }

            // Treat the graph as undirected for clustering / centrality to stabilise communities.
            adjacency[source].Add(target);
            adjacency[target].Add(source);
        }

        return new GraphTopology(nodes, adjacency, nodeLookup);
    }

    private ImmutableArray<ClusterAssignment> ComputeClusters(GraphTopology topology)
    {
        var labels = topology.Nodes
            .OrderBy(n => n.NodeId, StringComparer.Ordinal)
            .Select(n => (NodeId: n.NodeId, Label: n.NodeId, Kind: n.Kind))
            .ToArray();

        for (var iteration = 0; iteration < _options.MaxPropagationIterations; iteration++)
        {
            var updated = false;
            foreach (ref var entry in labels.AsSpan())
            {
                if (!topology.Adjacency.TryGetValue(entry.NodeId, out var neighbors) || neighbors.Count == 0)
                {
                    continue;
                }

                var best = SelectDominantLabel(neighbors, labels);
                if (!string.Equals(best, entry.Label, StringComparison.Ordinal))
                {
                    entry.Label = best;
                    updated = true;
                }
            }

            if (!updated)
            {
                break;
            }
        }

        return labels
            .OrderBy(t => t.NodeId, StringComparer.Ordinal)
            .Select(t => new ClusterAssignment(t.NodeId, t.Label, t.Kind))
            .ToImmutableArray();
    }

    private static string SelectDominantLabel(IEnumerable<string> neighbors, (string NodeId, string Label, string Kind)[] labels)
    {
        var labelCounts = new Dictionary<string, int>(StringComparer.Ordinal);
        foreach (var neighbor in neighbors)
        {
            var neighborLabel = labels.First(t => t.NodeId == neighbor).Label;
            labelCounts.TryGetValue(neighborLabel, out var count);
            labelCounts[neighborLabel] = count + 1;
        }

        var max = labelCounts.Max(kvp => kvp.Value);
        return labelCounts
            .Where(kvp => kvp.Value == max)
            .Select(kvp => kvp.Key)
            .OrderBy(label => label, StringComparer.Ordinal)
            .First();
    }

    private ImmutableArray<CentralityScore> ComputeCentrality(GraphTopology topology)
    {
        var degreeScores = new Dictionary<string, double>(StringComparer.Ordinal);
        foreach (var (nodeId, neighbors) in topology.Adjacency)
        {
            degreeScores[nodeId] = neighbors.Count;
        }

        var betweenness = CalculateBetweenness(topology);

        return topology.Nodes
            .OrderBy(n => n.NodeId, StringComparer.Ordinal)
            .Select(n => new CentralityScore(
                n.NodeId,
                degreeScores.TryGetValue(n.NodeId, out var degree) ? degree : 0d,
                betweenness.TryGetValue(n.NodeId, out var between) ? between : 0d,
                n.Kind))
            .ToImmutableArray();
    }

    private Dictionary<string, double> CalculateBetweenness(GraphTopology topology)
    {
        var scores = topology.Nodes.ToDictionary(n => n.NodeId, _ => 0d, StringComparer.Ordinal);
        if (scores.Count == 0)
        {
            return scores;
        }

        var sampled = topology.Nodes
            .OrderBy(n => n.NodeId, StringComparer.Ordinal)
            .Take(Math.Min(_options.BetweennessSampleSize, topology.Nodes.Length))
            .ToArray();

        foreach (var source in sampled)
        {
            var stack = new Stack<string>();
            var predecessors = new Dictionary<string, List<string>>(StringComparer.Ordinal);
            var sigma = new Dictionary<string, double>(StringComparer.Ordinal);
            var distance = new Dictionary<string, int>(StringComparer.Ordinal);
            var queue = new Queue<string>();

            foreach (var node in topology.Nodes)
            {
                predecessors[node.NodeId] = new List<string>();
                sigma[node.NodeId] = 0;
                distance[node.NodeId] = -1;
            }

            sigma[source.NodeId] = 1;
            distance[source.NodeId] = 0;
            queue.Enqueue(source.NodeId);

            while (queue.Count > 0)
            {
                var v = queue.Dequeue();
                stack.Push(v);

                foreach (var neighbor in topology.GetNeighbors(v))
                {
                    if (distance[neighbor] < 0)
                    {
                        distance[neighbor] = distance[v] + 1;
                        queue.Enqueue(neighbor);
                    }

                    if (distance[neighbor] == distance[v] + 1)
                    {
                        sigma[neighbor] += sigma[v];
                        predecessors[neighbor].Add(v);
                    }
                }
            }

            var delta = topology.Nodes.ToDictionary(n => n.NodeId, _ => 0d, StringComparer.Ordinal);
            while (stack.Count > 0)
            {
                var w = stack.Pop();
                foreach (var v in predecessors[w])
                {
                    delta[v] += (sigma[v] / sigma[w]) * (1 + delta[w]);
                }

                if (!string.Equals(w, source.NodeId, StringComparison.Ordinal))
                {
                    scores[w] += delta[w];
                }
            }
        }

        return scores;
    }

    private sealed record GraphNode(string NodeId, string Kind);

    private sealed class GraphTopology
    {
        public GraphTopology(ImmutableArray<GraphNode> nodes, Dictionary<string, HashSet<string>> adjacency, IReadOnlyDictionary<string, string> kinds)
        {
            Nodes = nodes;
            Adjacency = adjacency;
            Kinds = kinds;
        }

        public ImmutableArray<GraphNode> Nodes { get; }
        public Dictionary<string, HashSet<string>> Adjacency { get; }
        public IReadOnlyDictionary<string, string> Kinds { get; }

        public IEnumerable<string> GetNeighbors(string nodeId)
        {
            if (Adjacency.TryGetValue(nodeId, out var neighbors))
            {
                return neighbors;
            }

            return Array.Empty<string>();
        }
    }

    private sealed class EdgeEndpointResolver
    {
        private readonly IReadOnlyDictionary<string, JsonObject> _nodesById;
        private readonly IReadOnlyDictionary<string, string> _componentNodeByPurl;
        private readonly IReadOnlyDictionary<string, string> _artifactNodeByDigest;

        public EdgeEndpointResolver(ImmutableArray<JsonObject> nodes)
        {
            _nodesById = nodes.ToImmutableDictionary(
                node => node["id"]!.GetValue<string>(),
                node => node,
                StringComparer.Ordinal);

            _componentNodeByPurl = BuildComponentIndex(nodes);
            _artifactNodeByDigest = BuildArtifactIndex(nodes);
        }

        public bool TryResolve(JsonObject edge, out string source, out string target)
        {
            var kind = edge["kind"]!.GetValue<string>();
            var canonicalKey = edge["canonical_key"]!.AsObject();

            string? s = null;
            string? t = null;

            switch (kind)
            {
                case "CONTAINS":
                    s = canonicalKey.TryGetPropertyValue("artifact_node_id", out var containsSource)
                        ? containsSource?.GetValue<string>()
                        : null;
                    t = canonicalKey.TryGetPropertyValue("component_node_id", out var containsTarget)
                        ? containsTarget?.GetValue<string>()
                        : null;
                    break;
                case "DECLARED_IN":
                    s = canonicalKey.TryGetPropertyValue("component_node_id", out var declaredSource)
                        ? declaredSource?.GetValue<string>()
                        : null;
                    t = canonicalKey.TryGetPropertyValue("file_node_id", out var declaredTarget)
                        ? declaredTarget?.GetValue<string>()
                        : null;
                    break;
                case "AFFECTED_BY":
                    s = canonicalKey.TryGetPropertyValue("component_node_id", out var affectedSource)
                        ? affectedSource?.GetValue<string>()
                        : null;
                    t = canonicalKey.TryGetPropertyValue("advisory_node_id", out var affectedTarget)
                        ? affectedTarget?.GetValue<string>()
                        : null;
                    break;
                case "VEX_EXEMPTS":
                    s = canonicalKey.TryGetPropertyValue("component_node_id", out var vexSource)
                        ? vexSource?.GetValue<string>()
                        : null;
                    t = canonicalKey.TryGetPropertyValue("vex_node_id", out var vexTarget)
                        ? vexTarget?.GetValue<string>()
                        : null;
                    break;
                case "GOVERNS_WITH":
                    s = canonicalKey.TryGetPropertyValue("policy_node_id", out var policySource)
                        ? policySource?.GetValue<string>()
                        : null;
                    t = canonicalKey.TryGetPropertyValue("component_node_id", out var policyTarget)
                        ? policyTarget?.GetValue<string>()
                        : null;
                    break;
                case "OBSERVED_RUNTIME":
                    s = canonicalKey.TryGetPropertyValue("runtime_node_id", out var runtimeSource)
                        ? runtimeSource?.GetValue<string>()
                        : null;
                    t = canonicalKey.TryGetPropertyValue("component_node_id", out var runtimeTarget)
                        ? runtimeTarget?.GetValue<string>()
                        : null;
                    break;
                case "BUILT_FROM":
                    s = canonicalKey.TryGetPropertyValue("parent_artifact_node_id", out var builtSource)
                        ? builtSource?.GetValue<string>()
                        : null;

                    if (canonicalKey.TryGetPropertyValue("child_artifact_node_id", out var builtTargetNode) && builtTargetNode is not null)
                    {
                        t = builtTargetNode.GetValue<string>();
                    }
                    else if (canonicalKey.TryGetPropertyValue("child_artifact_digest", out var builtTargetDigest) && builtTargetDigest is not null)
                    {
                        _artifactNodeByDigest.TryGetValue(builtTargetDigest.GetValue<string>(), out t);
                    }

                    break;
                case "DEPENDS_ON":
                    s = canonicalKey.TryGetPropertyValue("component_node_id", out var dependsSource)
                        ? dependsSource?.GetValue<string>()
                        : null;

                    if (canonicalKey.TryGetPropertyValue("dependency_node_id", out var dependsTargetNode) && dependsTargetNode is not null)
                    {
                        t = dependsTargetNode.GetValue<string>();
                    }
                    else if (canonicalKey.TryGetPropertyValue("dependency_purl", out var dependencyPurl) && dependencyPurl is not null)
                    {
                        _componentNodeByPurl.TryGetValue(dependencyPurl.GetValue<string>(), out t);
                    }

                    break;
                default:
                    s = ExtractFirstNodeId(canonicalKey);
                    t = ExtractSecondNodeId(canonicalKey);
                    break;
            }

            if (s is null || t is null)
            {
                source = string.Empty;
                target = string.Empty;
                return false;
            }

            if (!_nodesById.ContainsKey(s) || !_nodesById.ContainsKey(t))
            {
                source = string.Empty;
                target = string.Empty;
                return false;
            }

            source = s;
            target = t;
            return true;
        }

        private static Dictionary<string, string> BuildComponentIndex(ImmutableArray<JsonObject> nodes)
        {
            var components = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            foreach (var node in nodes)
            {
                if (!string.Equals(node["kind"]!.GetValue<string>(), "component", StringComparison.Ordinal))
                {
                    continue;
                }

                if (!node.TryGetPropertyValue("attributes", out var attributesNode) || attributesNode is not JsonObject attributes)
                {
                    continue;
                }

                if (!attributes.TryGetPropertyValue("purl", out var purlNode) || purlNode is null)
                {
                    continue;
                }

                var purl = purlNode.GetValue<string>();
                if (!string.IsNullOrWhiteSpace(purl))
                {
                    components.TryAdd(purl.Trim(), node["id"]!.GetValue<string>());
                }
            }

            return components;
        }

        private static Dictionary<string, string> BuildArtifactIndex(ImmutableArray<JsonObject> nodes)
        {
            var artifacts = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            foreach (var node in nodes)
            {
                if (!string.Equals(node["kind"]!.GetValue<string>(), "artifact", StringComparison.Ordinal))
                {
                    continue;
                }

                if (!node.TryGetPropertyValue("attributes", out var attributesNode) || attributesNode is not JsonObject attributes)
                {
                    continue;
                }

                if (!attributes.TryGetPropertyValue("artifact_digest", out var digestNode) || digestNode is null)
                {
                    continue;
                }

                var digest = digestNode.GetValue<string>();
                if (!string.IsNullOrWhiteSpace(digest))
                {
                    artifacts.TryAdd(digest.Trim(), node["id"]!.GetValue<string>());
                }
            }

            return artifacts;
        }

        private static string? ExtractFirstNodeId(JsonObject canonicalKey)
        {
            foreach (var property in canonicalKey)
            {
                if (property.Value is JsonValue value
                    && value.TryGetValue(out string? candidate)
                    && candidate is not null
                    && candidate.StartsWith("gn:", StringComparison.Ordinal))
                {
                    return candidate;
                }
            }

            return null;
        }

        private static string? ExtractSecondNodeId(JsonObject canonicalKey)
        {
            var encountered = false;
            foreach (var property in canonicalKey)
            {
                if (property.Value is JsonValue value
                    && value.TryGetValue(out string? candidate)
                    && candidate is not null
                    && candidate.StartsWith("gn:", StringComparison.Ordinal))
                {
                    if (!encountered)
                    {
                        encountered = true;
                        continue;
                    }

                    return candidate;
                }
            }

            return null;
        }
    }
}
@@ -0,0 +1,53 @@
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed class GraphAnalyticsHostedService : BackgroundService
{
    private readonly IGraphAnalyticsPipeline _pipeline;
    private readonly GraphAnalyticsOptions _options;
    private readonly ILogger<GraphAnalyticsHostedService> _logger;

    public GraphAnalyticsHostedService(
        IGraphAnalyticsPipeline pipeline,
        IOptions<GraphAnalyticsOptions> options,
        ILogger<GraphAnalyticsHostedService> logger)
    {
        _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        using var clusteringTimer = new PeriodicTimer(_options.ClusterInterval);
        using var centralityTimer = new PeriodicTimer(_options.CentralityInterval);

        // PeriodicTimer supports a single pending WaitForNextTickAsync call, so keep one
        // outstanding wait per timer and re-arm only the timer that fired.
        var clusteringTask = clusteringTimer.WaitForNextTickAsync(stoppingToken).AsTask();
        var centralityTask = centralityTimer.WaitForNextTickAsync(stoppingToken).AsTask();

        while (!stoppingToken.IsCancellationRequested)
        {
            var completed = await Task.WhenAny(clusteringTask, centralityTask).ConfigureAwait(false);
            if (completed.IsCanceled || stoppingToken.IsCancellationRequested)
            {
                break;
            }

            if (completed == clusteringTask)
            {
                clusteringTask = clusteringTimer.WaitForNextTickAsync(stoppingToken).AsTask();
            }
            else
            {
                centralityTask = centralityTimer.WaitForNextTickAsync(stoppingToken).AsTask();
            }

            try
            {
                await _pipeline.RunAsync(new GraphAnalyticsRunContext(ForceBackfill: false), stoppingToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException)
            {
                // graceful shutdown
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "graph-indexer: analytics pipeline failed during scheduled run");
            }
        }
    }
}
@@ -0,0 +1,88 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed class GraphAnalyticsMetrics : IDisposable
{
    public const string MeterName = "StellaOps.Graph.Indexer";
    public const string MeterVersion = "1.0.0";

    private const string RunsTotalName = "graph_analytics_runs_total";
    private const string FailuresTotalName = "graph_analytics_failures_total";
    private const string DurationSecondsName = "graph_analytics_duration_seconds";
    private const string ClustersTotalName = "graph_analytics_clusters_total";
    private const string CentralityTotalName = "graph_analytics_centrality_total";

    private readonly Meter _meter;
    private readonly bool _ownsMeter;
    private readonly Counter<long> _runsTotal;
    private readonly Counter<long> _failuresTotal;
    private readonly Histogram<double> _durationSeconds;
    private readonly Counter<long> _clustersTotal;
    private readonly Counter<long> _centralityTotal;
    private bool _disposed;

    public GraphAnalyticsMetrics()
        : this(null)
    {
    }

    public GraphAnalyticsMetrics(Meter? meter)
    {
        _meter = meter ?? new Meter(MeterName, MeterVersion);
        _ownsMeter = meter is null;

        _runsTotal = _meter.CreateCounter<long>(RunsTotalName, unit: "count", description: "Total analytics runs executed.");
        _failuresTotal = _meter.CreateCounter<long>(FailuresTotalName, unit: "count", description: "Total analytics runs that failed.");
        _durationSeconds = _meter.CreateHistogram<double>(DurationSecondsName, unit: "s", description: "Duration of analytics runs.");
        _clustersTotal = _meter.CreateCounter<long>(ClustersTotalName, unit: "count", description: "Cluster assignments written.");
        _centralityTotal = _meter.CreateCounter<long>(CentralityTotalName, unit: "count", description: "Centrality scores written.");
    }

    public void RecordRun(string tenant, bool success, TimeSpan duration, int clusterCount, int centralityCount)
    {
        ThrowIfDisposed();

        var tags = new KeyValuePair<string, object?>[]
        {
            new("tenant", tenant),
            new("success", success)
        };

        var tagSpan = tags.AsSpan();
        _runsTotal.Add(1, tagSpan);
        if (!success)
        {
            _failuresTotal.Add(1, tagSpan);
        }

        _durationSeconds.Record(duration.TotalSeconds, tagSpan);
        _clustersTotal.Add(clusterCount, tagSpan);
        _centralityTotal.Add(centralityCount, tagSpan);
    }

    private void ThrowIfDisposed()
    {
        if (_disposed)
        {
            throw new ObjectDisposedException(nameof(GraphAnalyticsMetrics));
        }
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }

        if (_ownsMeter)
        {
            _meter.Dispose();
        }

        _disposed = true;
    }
}
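For test assertions against these instruments, a MeterListener can observe the counters by meter name. A minimal sketch (the console sink is illustrative; tests would capture measurements instead):

using System;
using System.Diagnostics.Metrics;

// Observe counters emitted by GraphAnalyticsMetrics (filtered on the public MeterName).
using var listener = new MeterListener();
listener.InstrumentPublished = (instrument, l) =>
{
    if (instrument.Meter.Name == GraphAnalyticsMetrics.MeterName)
    {
        l.EnableMeasurementEvents(instrument);
    }
};
listener.SetMeasurementEventCallback<long>(
    (instrument, value, tags, state) => Console.WriteLine($"{instrument.Name} += {value}"));
listener.Start();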
@@ -0,0 +1,31 @@
using System;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed class GraphAnalyticsOptions
{
    /// <summary>
    /// Interval for running clustering (label propagation over the undirected graph).
    /// </summary>
    public TimeSpan ClusterInterval { get; set; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Interval for recomputing centrality metrics (degree + betweenness approximation).
    /// </summary>
    public TimeSpan CentralityInterval { get; set; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Maximum number of iterations for label propagation.
    /// </summary>
    public int MaxPropagationIterations { get; set; } = 6;

    /// <summary>
    /// Number of seed nodes to sample (deterministically) for betweenness approximation.
    /// </summary>
    public int BetweennessSampleSize { get; set; } = 12;

    /// <summary>
    /// Whether to also write cluster ids onto graph node documents (alongside overlays).
    /// </summary>
    public bool WriteClusterAssignmentsToNodes { get; set; } = true;
}
@@ -0,0 +1,72 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed class GraphAnalyticsPipeline : IGraphAnalyticsPipeline
{
    private readonly GraphAnalyticsEngine _engine;
    private readonly IGraphSnapshotProvider _snapshotProvider;
    private readonly IGraphAnalyticsWriter _writer;
    private readonly GraphAnalyticsMetrics _metrics;
    private readonly ILogger<GraphAnalyticsPipeline> _logger;

    public GraphAnalyticsPipeline(
        GraphAnalyticsEngine engine,
        IGraphSnapshotProvider snapshotProvider,
        IGraphAnalyticsWriter writer,
        GraphAnalyticsMetrics metrics,
        ILogger<GraphAnalyticsPipeline> logger)
    {
        _engine = engine ?? throw new ArgumentNullException(nameof(engine));
        _snapshotProvider = snapshotProvider ?? throw new ArgumentNullException(nameof(snapshotProvider));
        _writer = writer ?? throw new ArgumentNullException(nameof(writer));
        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task RunAsync(GraphAnalyticsRunContext context, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var snapshots = await _snapshotProvider.GetPendingSnapshotsAsync(cancellationToken).ConfigureAwait(false);
        foreach (var snapshot in snapshots)
        {
            var stopwatch = Stopwatch.StartNew();
            try
            {
                cancellationToken.ThrowIfCancellationRequested();

                var result = _engine.Compute(snapshot);

                await _writer.PersistClusterAssignmentsAsync(snapshot, result.Clusters, cancellationToken).ConfigureAwait(false);
                await _writer.PersistCentralityAsync(snapshot, result.CentralityScores, cancellationToken).ConfigureAwait(false);
                await _snapshotProvider.MarkProcessedAsync(snapshot.Tenant, snapshot.SnapshotId, cancellationToken).ConfigureAwait(false);

                stopwatch.Stop();
                _metrics.RecordRun(snapshot.Tenant, success: true, stopwatch.Elapsed, result.Clusters.Length, result.CentralityScores.Length);

                _logger.LogInformation(
                    "graph-indexer: analytics computed for snapshot {SnapshotId} tenant {Tenant} with {ClusterCount} clusters and {CentralityCount} centrality scores in {DurationMs:F2} ms",
                    snapshot.SnapshotId,
                    snapshot.Tenant,
                    result.Clusters.Length,
                    result.CentralityScores.Length,
                    stopwatch.Elapsed.TotalMilliseconds);
            }
            catch (Exception ex)
            {
                stopwatch.Stop();
                _metrics.RecordRun(snapshot.Tenant, success: false, stopwatch.Elapsed, 0, 0);

                _logger.LogError(
                    ex,
                    "graph-indexer: analytics failed for snapshot {SnapshotId} tenant {Tenant}",
                    snapshot.SnapshotId,
                    snapshot.Tenant);

                throw;
            }
        }
    }
}
@@ -0,0 +1,36 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;

namespace StellaOps.Graph.Indexer.Analytics;

public static class GraphAnalyticsServiceCollectionExtensions
{
    public static IServiceCollection AddGraphAnalyticsPipeline(
        this IServiceCollection services,
        Action<GraphAnalyticsOptions>? configureOptions = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        if (configureOptions is not null)
        {
            services.Configure(configureOptions);
        }
        else
        {
            services.Configure<GraphAnalyticsOptions>(_ => { });
        }

        services.AddSingleton<GraphAnalyticsEngine>(provider =>
        {
            var options = provider.GetRequiredService<IOptions<GraphAnalyticsOptions>>();
            return new GraphAnalyticsEngine(options.Value);
        });

        services.AddSingleton<GraphAnalyticsMetrics>();
        services.AddSingleton<IGraphAnalyticsPipeline, GraphAnalyticsPipeline>();
        services.AddHostedService<GraphAnalyticsHostedService>();

        return services;
    }
}
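Illustrative host wiring for the pipeline (a sketch, not committed code; it assumes a generic host and uses the in-memory implementations defined below, since the pipeline also resolves IGraphSnapshotProvider and IGraphAnalyticsWriter):

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using StellaOps.Graph.Indexer.Analytics;

var builder = Host.CreateApplicationBuilder(args);

// The pipeline resolves these two dependencies; swap in MongoGraphAnalyticsWriter for production.
builder.Services.AddSingleton<IGraphSnapshotProvider, InMemoryGraphSnapshotProvider>();
builder.Services.AddSingleton<IGraphAnalyticsWriter, InMemoryGraphAnalyticsWriter>();

builder.Services.AddGraphAnalyticsPipeline(options =>
{
    options.ClusterInterval = TimeSpan.FromMinutes(10); // illustrative values
    options.MaxPropagationIterations = 8;
    options.BetweennessSampleSize = 16;
});

await builder.Build().RunAsync();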
@@ -0,0 +1,40 @@
using System;
using System.Collections.Immutable;
using System.Text.Json.Nodes;
using StellaOps.Graph.Indexer.Documents;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed record GraphAnalyticsSnapshot(
    string Tenant,
    string SnapshotId,
    DateTimeOffset GeneratedAt,
    ImmutableArray<JsonObject> Nodes,
    ImmutableArray<JsonObject> Edges);

public sealed record GraphAnalyticsRunContext(bool ForceBackfill);

public sealed record ClusterAssignment(string NodeId, string ClusterId, string Kind);

public sealed record CentralityScore(string NodeId, double Degree, double Betweenness, string Kind);

public sealed record GraphAnalyticsResult(
    ImmutableArray<ClusterAssignment> Clusters,
    ImmutableArray<CentralityScore> CentralityScores);

public interface IGraphSnapshotProvider
{
    Task<IReadOnlyList<GraphAnalyticsSnapshot>> GetPendingSnapshotsAsync(CancellationToken cancellationToken);
    Task MarkProcessedAsync(string tenant, string snapshotId, CancellationToken cancellationToken);
}

public interface IGraphAnalyticsWriter
{
    Task PersistClusterAssignmentsAsync(GraphAnalyticsSnapshot snapshot, ImmutableArray<ClusterAssignment> assignments, CancellationToken cancellationToken);
    Task PersistCentralityAsync(GraphAnalyticsSnapshot snapshot, ImmutableArray<CentralityScore> scores, CancellationToken cancellationToken);
}

public interface IGraphAnalyticsPipeline
{
    Task RunAsync(GraphAnalyticsRunContext context, CancellationToken cancellationToken);
}
@@ -0,0 +1,9 @@
namespace StellaOps.Graph.Indexer.Analytics;

public sealed class GraphAnalyticsWriterOptions
{
    public string ClusterCollectionName { get; set; } = "graph_cluster_overlays";
    public string CentralityCollectionName { get; set; } = "graph_centrality_overlays";
    public string NodeCollectionName { get; set; } = "graph_nodes";
    public bool WriteClusterAssignmentsToNodes { get; set; } = true;
}
@@ -0,0 +1,26 @@
using System.Collections.Immutable;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed class InMemoryGraphAnalyticsWriter : IGraphAnalyticsWriter
{
    private readonly List<(GraphAnalyticsSnapshot Snapshot, ImmutableArray<ClusterAssignment> Assignments)> _clusters = new();
    private readonly List<(GraphAnalyticsSnapshot Snapshot, ImmutableArray<CentralityScore> Scores)> _centrality = new();

    public IReadOnlyList<(GraphAnalyticsSnapshot Snapshot, ImmutableArray<ClusterAssignment> Assignments)> ClusterWrites => _clusters;
    public IReadOnlyList<(GraphAnalyticsSnapshot Snapshot, ImmutableArray<CentralityScore> Scores)> CentralityWrites => _centrality;

    public Task PersistClusterAssignmentsAsync(GraphAnalyticsSnapshot snapshot, ImmutableArray<ClusterAssignment> assignments, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        _clusters.Add((snapshot, assignments));
        return Task.CompletedTask;
    }

    public Task PersistCentralityAsync(GraphAnalyticsSnapshot snapshot, ImmutableArray<CentralityScore> scores, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        _centrality.Add((snapshot, scores));
        return Task.CompletedTask;
    }
}
@@ -0,0 +1,35 @@
using System.Collections.Concurrent;
using System.Collections.Immutable;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed class InMemoryGraphSnapshotProvider : IGraphSnapshotProvider
{
    private readonly ConcurrentQueue<GraphAnalyticsSnapshot> _queue = new();

    public void Enqueue(GraphAnalyticsSnapshot snapshot)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        _queue.Enqueue(snapshot);
    }

    public Task<IReadOnlyList<GraphAnalyticsSnapshot>> GetPendingSnapshotsAsync(CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var list = new List<GraphAnalyticsSnapshot>();
        while (_queue.TryDequeue(out var snapshot))
        {
            list.Add(snapshot);
        }

        return Task.FromResult<IReadOnlyList<GraphAnalyticsSnapshot>>(list.ToImmutableArray());
    }

    public Task MarkProcessedAsync(string tenant, string snapshotId, CancellationToken cancellationToken)
    {
        // No-op for in-memory provider; processing removes items eagerly.
        cancellationToken.ThrowIfCancellationRequested();
        return Task.CompletedTask;
    }
}
@@ -0,0 +1,116 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using MongoDB.Bson;
using MongoDB.Driver;

namespace StellaOps.Graph.Indexer.Analytics;

public sealed class MongoGraphAnalyticsWriter : IGraphAnalyticsWriter
{
    private readonly IMongoCollection<BsonDocument> _clusters;
    private readonly IMongoCollection<BsonDocument> _centrality;
    private readonly IMongoCollection<BsonDocument> _nodes;
    private readonly GraphAnalyticsWriterOptions _options;

    public MongoGraphAnalyticsWriter(IMongoDatabase database, GraphAnalyticsWriterOptions? options = null)
    {
        ArgumentNullException.ThrowIfNull(database);

        _options = options ?? new GraphAnalyticsWriterOptions();
        _clusters = database.GetCollection<BsonDocument>(_options.ClusterCollectionName);
        _centrality = database.GetCollection<BsonDocument>(_options.CentralityCollectionName);
        _nodes = database.GetCollection<BsonDocument>(_options.NodeCollectionName);
    }

    public async Task PersistClusterAssignmentsAsync(GraphAnalyticsSnapshot snapshot, ImmutableArray<ClusterAssignment> assignments, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();

        if (assignments.Length == 0)
        {
            return;
        }

        var models = new List<WriteModel<BsonDocument>>(assignments.Length);
        foreach (var assignment in assignments)
        {
            var filter = Builders<BsonDocument>.Filter.And(
                Builders<BsonDocument>.Filter.Eq("tenant", snapshot.Tenant),
                Builders<BsonDocument>.Filter.Eq("snapshot_id", snapshot.SnapshotId),
                Builders<BsonDocument>.Filter.Eq("node_id", assignment.NodeId));

            var document = new BsonDocument
            {
                { "tenant", snapshot.Tenant },
                { "snapshot_id", snapshot.SnapshotId },
                { "node_id", assignment.NodeId },
                { "cluster_id", assignment.ClusterId },
                { "kind", assignment.Kind },
                { "generated_at", snapshot.GeneratedAt.UtcDateTime }
            };

            models.Add(new ReplaceOneModel<BsonDocument>(filter, document) { IsUpsert = true });
        }

        await _clusters.BulkWriteAsync(models, new BulkWriteOptions { IsOrdered = false }, cancellationToken).ConfigureAwait(false);

        if (_options.WriteClusterAssignmentsToNodes)
        {
            await WriteClustersToNodesAsync(assignments, cancellationToken).ConfigureAwait(false);
        }
    }

    public async Task PersistCentralityAsync(GraphAnalyticsSnapshot snapshot, ImmutableArray<CentralityScore> scores, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();

        if (scores.Length == 0)
        {
            return;
        }

        var models = new List<WriteModel<BsonDocument>>(scores.Length);
        foreach (var score in scores)
        {
            var filter = Builders<BsonDocument>.Filter.And(
                Builders<BsonDocument>.Filter.Eq("tenant", snapshot.Tenant),
                Builders<BsonDocument>.Filter.Eq("snapshot_id", snapshot.SnapshotId),
                Builders<BsonDocument>.Filter.Eq("node_id", score.NodeId));

            var document = new BsonDocument
            {
                { "tenant", snapshot.Tenant },
                { "snapshot_id", snapshot.SnapshotId },
                { "node_id", score.NodeId },
                { "kind", score.Kind },
                { "degree", score.Degree },
                { "betweenness", score.Betweenness },
                { "generated_at", snapshot.GeneratedAt.UtcDateTime }
            };

            models.Add(new ReplaceOneModel<BsonDocument>(filter, document) { IsUpsert = true });
        }

        await _centrality.BulkWriteAsync(models, new BulkWriteOptions { IsOrdered = false }, cancellationToken).ConfigureAwait(false);
    }

    private async Task WriteClustersToNodesAsync(IEnumerable<ClusterAssignment> assignments, CancellationToken cancellationToken)
    {
        var models = new List<WriteModel<BsonDocument>>();
        foreach (var assignment in assignments)
        {
            var filter = Builders<BsonDocument>.Filter.Eq("id", assignment.NodeId);
            var update = Builders<BsonDocument>.Update.Set("attributes.cluster_id", assignment.ClusterId);
            models.Add(new UpdateOneModel<BsonDocument>(filter, update) { IsUpsert = false });
        }

        if (models.Count == 0)
        {
            return;
        }

        await _nodes.BulkWriteAsync(models, new BulkWriteOptions { IsOrdered = false }, cancellationToken).ConfigureAwait(false);
    }
}
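Since both persist paths upsert on (tenant, snapshot_id, node_id), the overlay collections benefit from a matching unique index. A hedged bootstrap sketch (the helper and its call site are assumptions, not part of the committed writer):

using MongoDB.Bson;
using MongoDB.Driver;

// One-time setup for an overlay collection (e.g., graph_cluster_overlays).
static Task EnsureOverlayIndexAsync(IMongoCollection<BsonDocument> collection, CancellationToken ct)
{
    var keys = Builders<BsonDocument>.IndexKeys
        .Ascending("tenant")
        .Ascending("snapshot_id")
        .Ascending("node_id");

    var model = new CreateIndexModel<BsonDocument>(keys, new CreateIndexOptions { Unique = true });
    return collection.Indexes.CreateOneAsync(model, cancellationToken: ct);
}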
@@ -0,0 +1,89 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;

namespace StellaOps.Graph.Indexer.Incremental;

public sealed class GraphBackfillMetrics : IDisposable
{
    private const string MeterName = "StellaOps.Graph.Indexer";
    private const string MeterVersion = "1.0.0";

    private const string ChangesTotalName = "graph_changes_total";
    private const string BackfillTotalName = "graph_backfill_total";
    private const string FailuresTotalName = "graph_change_failures_total";
    private const string LagSecondsName = "graph_change_lag_seconds";

    private readonly Meter _meter;
    private readonly bool _ownsMeter;
    private readonly Counter<long> _changesTotal;
    private readonly Counter<long> _backfillTotal;
    private readonly Counter<long> _failuresTotal;
    private readonly Histogram<double> _lagSeconds;
    private bool _disposed;

    public GraphBackfillMetrics()
        : this(null)
    {
    }

    public GraphBackfillMetrics(Meter? meter)
    {
        _meter = meter ?? new Meter(MeterName, MeterVersion);
        _ownsMeter = meter is null;

        _changesTotal = _meter.CreateCounter<long>(ChangesTotalName, unit: "count", description: "Total change events applied.");
        _backfillTotal = _meter.CreateCounter<long>(BackfillTotalName, unit: "count", description: "Total backfill events applied.");
        _failuresTotal = _meter.CreateCounter<long>(FailuresTotalName, unit: "count", description: "Failed change applications.");
        _lagSeconds = _meter.CreateHistogram<double>(LagSecondsName, unit: "s", description: "Lag between change emission and application.");
    }

    public void RecordApplied(string tenant, bool backfill, TimeSpan lag, bool success)
    {
        ThrowIfDisposed();

        var tags = new KeyValuePair<string, object?>[]
        {
            new("tenant", tenant),
            new("backfill", backfill),
            new("success", success)
        };

        var tagSpan = tags.AsSpan();
        _changesTotal.Add(1, tagSpan);
        if (backfill)
        {
            _backfillTotal.Add(1, tagSpan);
        }

        if (!success)
        {
            _failuresTotal.Add(1, tagSpan);
        }

        _lagSeconds.Record(lag.TotalSeconds, tagSpan);
    }

    private void ThrowIfDisposed()
    {
        if (_disposed)
        {
            throw new ObjectDisposedException(nameof(GraphBackfillMetrics));
        }
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }

        if (_ownsMeter)
        {
            _meter.Dispose();
        }

        _disposed = true;
    }
}
@@ -0,0 +1,28 @@
using System.Collections.Immutable;
using System.Text.Json.Nodes;

namespace StellaOps.Graph.Indexer.Incremental;

public sealed record GraphChangeEvent(
    string Tenant,
    string SnapshotId,
    string SequenceToken,
    ImmutableArray<JsonObject> Nodes,
    ImmutableArray<JsonObject> Edges,
    bool IsBackfill = false);

public interface IGraphChangeEventSource
{
    IAsyncEnumerable<GraphChangeEvent> ReadAsync(CancellationToken cancellationToken);
}

public interface IGraphBackfillSource
{
    IAsyncEnumerable<GraphChangeEvent> ReadBackfillAsync(CancellationToken cancellationToken);
}

public interface IIdempotencyStore
{
    Task<bool> HasSeenAsync(string sequenceToken, CancellationToken cancellationToken);
    Task MarkSeenAsync(string sequenceToken, CancellationToken cancellationToken);
}
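A minimal in-memory IGraphChangeEventSource for tests, backed by System.Threading.Channels (a sketch under stated assumptions: the type name and PublishAsync helper are hypothetical, not part of this commit):

using System.Collections.Generic;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;

namespace StellaOps.Graph.Indexer.Incremental;

public sealed class ChannelGraphChangeEventSource : IGraphChangeEventSource
{
    private readonly Channel<GraphChangeEvent> _channel = Channel.CreateUnbounded<GraphChangeEvent>();

    // Test helper: push a change event into the stream.
    public ValueTask PublishAsync(GraphChangeEvent change, CancellationToken cancellationToken)
        => _channel.Writer.WriteAsync(change, cancellationToken);

    public IAsyncEnumerable<GraphChangeEvent> ReadAsync(CancellationToken cancellationToken)
        => _channel.Reader.ReadAllAsync(cancellationToken);
}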
@@ -0,0 +1,12 @@
using System;

namespace StellaOps.Graph.Indexer.Incremental;

public sealed class GraphChangeStreamOptions
{
    public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(5);
    public TimeSpan BackfillInterval { get; set; } = TimeSpan.FromMinutes(15);
    public TimeSpan RetryBackoff { get; set; } = TimeSpan.FromSeconds(3);
    public int MaxRetryAttempts { get; set; } = 3;
    public int MaxBatchSize { get; set; } = 256;
}
@@ -0,0 +1,119 @@
using System.Globalization;
using System.Linq;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Graph.Indexer.Ingestion.Sbom;

namespace StellaOps.Graph.Indexer.Incremental;

public sealed class GraphChangeStreamProcessor : BackgroundService
{
    private readonly IGraphChangeEventSource _changeSource;
    private readonly IGraphBackfillSource _backfillSource;
    private readonly IGraphDocumentWriter _writer;
    private readonly IIdempotencyStore _idempotencyStore;
    private readonly GraphChangeStreamOptions _options;
    private readonly GraphBackfillMetrics _metrics;
    private readonly ILogger<GraphChangeStreamProcessor> _logger;

    public GraphChangeStreamProcessor(
        IGraphChangeEventSource changeSource,
        IGraphBackfillSource backfillSource,
        IGraphDocumentWriter writer,
        IIdempotencyStore idempotencyStore,
        IOptions<GraphChangeStreamOptions> options,
        GraphBackfillMetrics metrics,
        ILogger<GraphChangeStreamProcessor> logger)
    {
        _changeSource = changeSource ?? throw new ArgumentNullException(nameof(changeSource));
        _backfillSource = backfillSource ?? throw new ArgumentNullException(nameof(backfillSource));
        _writer = writer ?? throw new ArgumentNullException(nameof(writer));
        _idempotencyStore = idempotencyStore ?? throw new ArgumentNullException(nameof(idempotencyStore));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        using var pollTimer = new PeriodicTimer(_options.PollInterval);
        using var backfillTimer = new PeriodicTimer(_options.BackfillInterval);

        // PeriodicTimer allows a single pending wait at a time; keep one outstanding
        // wait per timer and re-arm only the timer that fired.
        var pollTask = pollTimer.WaitForNextTickAsync(stoppingToken).AsTask();
        var backfillTask = backfillTimer.WaitForNextTickAsync(stoppingToken).AsTask();

        while (!stoppingToken.IsCancellationRequested)
        {
            var completed = await Task.WhenAny(pollTask, backfillTask).ConfigureAwait(false);
            if (completed.IsCanceled || stoppingToken.IsCancellationRequested)
            {
                break;
            }

            if (completed == pollTask)
            {
                pollTask = pollTimer.WaitForNextTickAsync(stoppingToken).AsTask();
                await ApplyStreamAsync(isBackfill: false, stoppingToken).ConfigureAwait(false);
            }
            else
            {
                backfillTask = backfillTimer.WaitForNextTickAsync(stoppingToken).AsTask();
                await ApplyStreamAsync(isBackfill: true, stoppingToken).ConfigureAwait(false);
            }
        }
    }

    internal async Task ApplyStreamAsync(bool isBackfill, CancellationToken cancellationToken)
    {
        var source = isBackfill ? _backfillSource.ReadBackfillAsync(cancellationToken) : _changeSource.ReadAsync(cancellationToken);

        await foreach (var change in source.WithCancellation(cancellationToken))
        {
            if (await _idempotencyStore.HasSeenAsync(change.SequenceToken, cancellationToken).ConfigureAwait(false))
            {
                continue;
            }

            var attempts = 0;
            while (true)
            {
                try
                {
                    cancellationToken.ThrowIfCancellationRequested();

                    var batch = new GraphBuildBatch(change.Nodes, change.Edges);
                    await _writer.WriteAsync(batch, cancellationToken).ConfigureAwait(false);
                    await _idempotencyStore.MarkSeenAsync(change.SequenceToken, cancellationToken).ConfigureAwait(false);

                    var collectedAt = change.Nodes
                        .Select(n => n.TryGetPropertyValue("provenance", out var prov) && prov is JsonObject obj && obj.TryGetPropertyValue("collected_at", out var collected) ? collected?.GetValue<string>() : null)
                        .FirstOrDefault(value => !string.IsNullOrWhiteSpace(value));

                    var lag = DateTimeOffset.TryParse(collectedAt, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal, out var parsed)
                        ? DateTimeOffset.UtcNow - parsed
                        : TimeSpan.Zero;

                    _metrics.RecordApplied(change.Tenant, isBackfill, lag, success: true);
                    break;
                }
                catch (OperationCanceledException)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    attempts++;
                    _metrics.RecordApplied(change.Tenant, isBackfill, TimeSpan.Zero, success: false);
                    _logger.LogError(ex, "graph-indexer: change stream apply failed for snapshot {SnapshotId} attempt {Attempt}", change.SnapshotId, attempts);

                    if (attempts >= _options.MaxRetryAttempts)
                    {
                        break;
                    }

                    await Task.Delay(_options.RetryBackoff, cancellationToken).ConfigureAwait(false);
                }
            }
        }
    }
}
@@ -0,0 +1,28 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;

namespace StellaOps.Graph.Indexer.Incremental;

public static class GraphChangeStreamServiceCollectionExtensions
{
    public static IServiceCollection AddGraphChangeStreamProcessor(
        this IServiceCollection services,
        Action<GraphChangeStreamOptions>? configureOptions = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        if (configureOptions is not null)
        {
            services.Configure(configureOptions);
        }
        else
        {
            services.Configure<GraphChangeStreamOptions>(_ => { });
        }

        services.AddSingleton<GraphBackfillMetrics>();
        services.AddHostedService<GraphChangeStreamProcessor>();
        return services;
    }
}
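Wiring sketch for the processor (illustrative; it assumes the hypothetical ChannelGraphChangeEventSource above plus real IGraphBackfillSource and IGraphDocumentWriter registrations, which this extension does not add on its own):

using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Graph.Indexer.Incremental;

// The processor resolves all four dependencies; register concrete types first.
services.AddSingleton<IGraphChangeEventSource, ChannelGraphChangeEventSource>(); // hypothetical test source
services.AddSingleton<IIdempotencyStore, InMemoryIdempotencyStore>();
// services.AddSingleton<IGraphBackfillSource, ...>();  // backfill reader over the snapshot store
// services.AddSingleton<IGraphDocumentWriter, ...>();  // from StellaOps.Graph.Indexer.Ingestion.Sbom

services.AddGraphChangeStreamProcessor(options =>
{
    options.PollInterval = TimeSpan.FromSeconds(2); // illustrative values
    options.MaxRetryAttempts = 5;
});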
@@ -0,0 +1,21 @@
using System.Collections.Concurrent;

namespace StellaOps.Graph.Indexer.Incremental;

public sealed class InMemoryIdempotencyStore : IIdempotencyStore
{
    private readonly ConcurrentDictionary<string, byte> _seen = new(StringComparer.Ordinal);

    public Task<bool> HasSeenAsync(string sequenceToken, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return Task.FromResult(_seen.ContainsKey(sequenceToken));
    }

    public Task MarkSeenAsync(string sequenceToken, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        _seen.TryAdd(sequenceToken, 0);
        return Task.CompletedTask;
    }
}
@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;

[assembly: InternalsVisibleTo("StellaOps.Graph.Indexer.Tests")]
@@ -11,6 +11,7 @@
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
  </ItemGroup>
@@ -0,0 +1,27 @@
using System.Linq;
using StellaOps.Graph.Indexer.Analytics;

namespace StellaOps.Graph.Indexer.Tests;

public sealed class GraphAnalyticsEngineTests
{
    [Fact]
    public void Compute_IsDeterministic_ForLinearGraph()
    {
        var snapshot = GraphAnalyticsTestData.CreateLinearSnapshot();
        var engine = new GraphAnalyticsEngine(new GraphAnalyticsOptions { MaxPropagationIterations = 5, BetweennessSampleSize = 8 });

        var first = engine.Compute(snapshot);
        var second = engine.Compute(snapshot);

        Assert.Equal(first.Clusters, second.Clusters);
        Assert.Equal(first.CentralityScores, second.CentralityScores);

        var mainCluster = first.Clusters.First(c => c.NodeId == snapshot.Nodes[0]["id"]!.GetValue<string>()).ClusterId;
        Assert.All(first.Clusters.Where(c => c.NodeId != snapshot.Nodes[^1]["id"]!.GetValue<string>()), c => Assert.Equal(mainCluster, c.ClusterId));

        var centralNode = first.CentralityScores.OrderByDescending(c => c.Betweenness).First();
        Assert.True(centralNode.Betweenness > 0);
        Assert.True(centralNode.Degree >= 2);
    }
}
@@ -0,0 +1,32 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Graph.Indexer.Analytics;

namespace StellaOps.Graph.Indexer.Tests;

public sealed class GraphAnalyticsPipelineTests
{
    [Fact]
    public async Task RunAsync_WritesClustersAndCentrality()
    {
        var snapshot = GraphAnalyticsTestData.CreateLinearSnapshot();

        var provider = new InMemoryGraphSnapshotProvider();
        provider.Enqueue(snapshot);

        using var metrics = new GraphAnalyticsMetrics();
        var writer = new InMemoryGraphAnalyticsWriter();
        var pipeline = new GraphAnalyticsPipeline(
            new GraphAnalyticsEngine(new GraphAnalyticsOptions()),
            provider,
            writer,
            metrics,
            NullLogger<GraphAnalyticsPipeline>.Instance);

        await pipeline.RunAsync(new GraphAnalyticsRunContext(false), CancellationToken.None);

        Assert.Single(writer.ClusterWrites);
        Assert.Single(writer.CentralityWrites);
        Assert.Equal(snapshot.Nodes.Length, writer.ClusterWrites.Single().Assignments.Length);
    }
}
@@ -0,0 +1,78 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json.Nodes;
using StellaOps.Graph.Indexer.Analytics;
using StellaOps.Graph.Indexer.Schema;

namespace StellaOps.Graph.Indexer.Tests;

internal static class GraphAnalyticsTestData
{
    public static GraphAnalyticsSnapshot CreateLinearSnapshot()
    {
        var tenant = "tenant-a";
        var provenance = new GraphProvenanceSpec("test", DateTimeOffset.UtcNow, "sbom-1", 1);

        var nodeA = GraphDocumentFactory.CreateNode(new GraphNodeSpec(
            tenant,
            "component",
            new Dictionary<string, string> { { "purl", "pkg:npm/a@1.0.0" } },
            new JsonObject { ["purl"] = "pkg:npm/a@1.0.0" },
            provenance,
            DateTimeOffset.UtcNow,
            null));

        var nodeB = GraphDocumentFactory.CreateNode(new GraphNodeSpec(
            tenant,
            "component",
            new Dictionary<string, string> { { "purl", "pkg:npm/b@1.0.0" } },
            new JsonObject { ["purl"] = "pkg:npm/b@1.0.0" },
            provenance,
            DateTimeOffset.UtcNow,
            null));

        var nodeC = GraphDocumentFactory.CreateNode(new GraphNodeSpec(
            tenant,
            "component",
            new Dictionary<string, string> { { "purl", "pkg:npm/c@1.0.0" } },
            new JsonObject { ["purl"] = "pkg:npm/c@1.0.0" },
            provenance,
            DateTimeOffset.UtcNow,
            null));

        var nodeD = GraphDocumentFactory.CreateNode(new GraphNodeSpec(
            tenant,
            "component",
            new Dictionary<string, string> { { "purl", "pkg:npm/d@1.0.0" } },
            new JsonObject { ["purl"] = "pkg:npm/d@1.0.0" },
            provenance,
            DateTimeOffset.UtcNow,
            null));

        var edgeAB = CreateDependsOnEdge(tenant, nodeA["id"]!.GetValue<string>(), nodeB["id"]!.GetValue<string>(), provenance);
        var edgeBC = CreateDependsOnEdge(tenant, nodeB["id"]!.GetValue<string>(), nodeC["id"]!.GetValue<string>(), provenance);

        return new GraphAnalyticsSnapshot(
            tenant,
            "snapshot-1",
            DateTimeOffset.UtcNow,
            ImmutableArray.Create(nodeA, nodeB, nodeC, nodeD),
            ImmutableArray.Create(edgeAB, edgeBC));
    }

    private static JsonObject CreateDependsOnEdge(string tenant, string sourceNodeId, string dependencyNodeId, GraphProvenanceSpec provenance)
    {
        return GraphDocumentFactory.CreateEdge(new GraphEdgeSpec(
            tenant,
            "DEPENDS_ON",
            new Dictionary<string, string>
            {
                { "component_node_id", sourceNodeId },
                { "dependency_node_id", dependencyNodeId }
            },
            new JsonObject(),
            provenance,
            DateTimeOffset.UtcNow,
            null));
    }
}
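To make the assertions in the two analytics tests above easier to follow, the fixture's shape can be summarised like this (a reading aid, not part of the source):

    // Topology produced by CreateLinearSnapshot — two DEPENDS_ON edges plus an
    // intentionally isolated fourth node:
    //
    //   pkg:npm/a@1.0.0 --> pkg:npm/b@1.0.0 --> pkg:npm/c@1.0.0      pkg:npm/d@1.0.0
    //
    // Hence a, b and c share one cluster id, d gets its own, and b is the
    // highest-betweenness node with degree 2.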
@@ -0,0 +1,110 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Graph.Indexer.Incremental;
using StellaOps.Graph.Indexer.Ingestion.Sbom;

namespace StellaOps.Graph.Indexer.Tests;

public sealed class GraphChangeStreamProcessorTests
{
    [Fact]
    public async Task ApplyStreamAsync_SkipsDuplicates_AndRetries()
    {
        var tenant = "tenant-a";
        var nodes = ImmutableArray.Create(new JsonObject { ["id"] = "gn:tenant-a:component:a", ["kind"] = "component" });
        var edges = ImmutableArray<JsonObject>.Empty;

        var events = new List<GraphChangeEvent>
        {
            new(tenant, "snap-1", "seq-1", nodes, edges, false),
            new(tenant, "snap-1", "seq-1", nodes, edges, false), // duplicate
            new(tenant, "snap-1", "seq-2", nodes, edges, false)
        };

        var changeSource = new FakeChangeSource(events);
        var backfillSource = new FakeChangeSource(Array.Empty<GraphChangeEvent>());
        var store = new InMemoryIdempotencyStore();
        var writer = new FlakyWriter(failFirst: true);
        using var metrics = new GraphBackfillMetrics();

        var options = Options.Create(new GraphChangeStreamOptions
        {
            MaxRetryAttempts = 3,
            RetryBackoff = TimeSpan.FromMilliseconds(10)
        });

        var processor = new GraphChangeStreamProcessor(
            changeSource,
            backfillSource,
            writer,
            store,
            options,
            metrics,
            NullLogger<GraphChangeStreamProcessor>.Instance);

        await processor.ApplyStreamAsync(isBackfill: false, CancellationToken.None);

        Assert.Equal(2, writer.BatchCount); // duplicate skipped
        Assert.True(writer.SucceededAfterRetry);
    }

    private sealed class FakeChangeSource : IGraphChangeEventSource, IGraphBackfillSource
    {
        private readonly IReadOnlyList<GraphChangeEvent> _events;

        public FakeChangeSource(IReadOnlyList<GraphChangeEvent> events)
        {
            _events = events;
        }

        public IAsyncEnumerable<GraphChangeEvent> ReadAsync(CancellationToken cancellationToken)
        {
            return EmitAsync(cancellationToken);
        }

        public IAsyncEnumerable<GraphChangeEvent> ReadBackfillAsync(CancellationToken cancellationToken)
        {
            return EmitAsync(cancellationToken);
        }

        private async IAsyncEnumerable<GraphChangeEvent> EmitAsync([System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken)
        {
            foreach (var change in _events)
            {
                cancellationToken.ThrowIfCancellationRequested();
                yield return change;
                await Task.Yield();
            }
        }
    }

    private sealed class FlakyWriter : IGraphDocumentWriter
    {
        private readonly bool _failFirst;
        private int _attempts;

        public FlakyWriter(bool failFirst)
        {
            _failFirst = failFirst;
        }

        public int BatchCount { get; private set; }
        public bool SucceededAfterRetry => _attempts > 1 && BatchCount > 0;

        public Task WriteAsync(GraphBuildBatch batch, CancellationToken cancellationToken)
        {
            _attempts++;
            if (_failFirst && _attempts == 1)
            {
                throw new InvalidOperationException("simulated failure");
            }

            BatchCount++;
            return Task.CompletedTask;
        }
    }
}
@@ -9,5 +9,8 @@

  <ItemGroup>
    <ProjectReference Include="../../StellaOps.Graph.Indexer/StellaOps.Graph.Indexer.csproj" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
  </ItemGroup>
</Project>
@@ -1,5 +1,4 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Provenance.Attestation;

@@ -9,13 +8,49 @@ public sealed record PromotionPredicate(
    string VexDigest,
    string PromotionId,
    string? RekorEntry = null,
    IReadOnlyDictionary<string,string>? Metadata = null);
    IReadOnlyDictionary<string, string>? Metadata = null);

public sealed record PromotionAttestation(
    PromotionPredicate Predicate,
    byte[] Payload,
    SignResult Signature);

public static class PromotionAttestationBuilder
{
    public const string PredicateType = "stella.ops/promotion@v1";
    public const string ContentType = "application/vnd.stella.promotion+json";

    public static byte[] CreateCanonicalJson(PromotionPredicate predicate)
    {
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        return CanonicalJson.SerializeToUtf8Bytes(predicate);
    }

    public static async Task<PromotionAttestation> BuildAsync(
        PromotionPredicate predicate,
        ISigner signer,
        IReadOnlyDictionary<string, string>? claims = null,
        CancellationToken cancellationToken = default)
    {
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        if (signer is null) throw new ArgumentNullException(nameof(signer));

        var payload = CreateCanonicalJson(predicate);

        // ensure predicate type claim is always present
        var mergedClaims = claims is null
            ? new Dictionary<string, string>(StringComparer.Ordinal)
            : new Dictionary<string, string>(claims, StringComparer.Ordinal);
        mergedClaims["predicateType"] = PredicateType;

        var request = new SignRequest(
            Payload: payload,
            ContentType: ContentType,
            Claims: mergedClaims,
            RequiredClaims: new[] { "predicateType" });

        var signature = await signer.SignAsync(request, cancellationToken).ConfigureAwait(false);

        return new PromotionAttestation(predicate, payload, signature);
    }
}
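For reviewers, the intended call shape of the new builder, as a minimal sketch — `predicate` and `signer` are assumed to already exist in scope, and the extra claim is purely illustrative:

    // Assumes `predicate` (PromotionPredicate) and `signer` (ISigner) are in scope.
    var extraClaims = new Dictionary<string, string> { ["tenant"] = "tenant-a" }; // illustrative claim
    var attestation = await PromotionAttestationBuilder.BuildAsync(predicate, signer, extraClaims, cancellationToken);

    // BuildAsync injects the predicateType claim itself, so the "predicateType"
    // required-claim check passes even when callers supply no claims at all.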
@@ -147,6 +147,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Ingestion.Telemet
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "..\Aoc\__Libraries\StellaOps.Aoc\StellaOps.Aoc.csproj", "{8237425A-933A-440E-AE6B-1DF57F228681}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Php", "__Libraries\StellaOps.Scanner.Analyzers.Lang.Php\StellaOps.Scanner.Analyzers.Lang.Php.csproj", "{0262C376-6C43-4A69-86EA-74C228BC0F36}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Php.Tests", "__Tests\StellaOps.Scanner.Analyzers.Lang.Php.Tests\StellaOps.Scanner.Analyzers.Lang.Php.Tests.csproj", "{F4A239E0-AC66-4105-8423-4805B2029ABE}"
EndProject
Global
  GlobalSection(SolutionConfigurationPlatforms) = preSolution
    Debug|Any CPU = Debug|Any CPU
@@ -985,6 +989,30 @@ Global
    {8237425A-933A-440E-AE6B-1DF57F228681}.Release|x64.Build.0 = Release|Any CPU
    {8237425A-933A-440E-AE6B-1DF57F228681}.Release|x86.ActiveCfg = Release|Any CPU
    {8237425A-933A-440E-AE6B-1DF57F228681}.Release|x86.Build.0 = Release|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Debug|Any CPU.Build.0 = Debug|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Debug|x64.ActiveCfg = Debug|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Debug|x64.Build.0 = Debug|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Debug|x86.ActiveCfg = Debug|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Debug|x86.Build.0 = Debug|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Release|Any CPU.ActiveCfg = Release|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Release|Any CPU.Build.0 = Release|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Release|x64.ActiveCfg = Release|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Release|x64.Build.0 = Release|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Release|x86.ActiveCfg = Release|Any CPU
    {0262C376-6C43-4A69-86EA-74C228BC0F36}.Release|x86.Build.0 = Release|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Debug|Any CPU.Build.0 = Debug|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Debug|x64.ActiveCfg = Debug|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Debug|x64.Build.0 = Debug|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Debug|x86.ActiveCfg = Debug|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Debug|x86.Build.0 = Debug|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Release|Any CPU.ActiveCfg = Release|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Release|Any CPU.Build.0 = Release|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Release|x64.ActiveCfg = Release|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Release|x64.Build.0 = Release|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Release|x86.ActiveCfg = Release|Any CPU
    {F4A239E0-AC66-4105-8423-4805B2029ABE}.Release|x86.Build.0 = Release|Any CPU
  EndGlobalSection
  GlobalSection(SolutionProperties) = preSolution
    HideSolutionNode = FALSE
@@ -1032,5 +1060,7 @@ Global
    {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
    {482026BC-2E89-4789-8A73-523FAAC8476F} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
    {E0104A8E-2C39-48C1-97EC-66C171310944} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
    {0262C376-6C43-4A69-86EA-74C228BC0F36} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
    {F4A239E0-AC66-4105-8423-4805B2029ABE} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
  EndGlobalSection
EndGlobal
@@ -174,6 +174,9 @@ public sealed class DenoLanguageAnalyzer : ILanguageAnalyzer
            metadata: runtimeMeta,
            view: "runtime");

        analysisStore.Set(ScanAnalysisKeys.DenoRuntimePayload, payload);

        // Backward compatibility with early runtime experiments that used a string key.
        analysisStore.Set("deno.runtime", payload);

        // Also emit policy signals into AnalysisStore for downstream consumption.
@@ -24,123 +24,460 @@ internal static class DenoRuntimeShim

    // NOTE: This shim is intentionally self-contained and avoids network calls.
    private const string ShimSource = """
        // @ts-nocheck
        // deno-runtime trace shim (offline, deterministic)
        // Emits module load, permission use, npm resolution, and wasm load events.
        const events: Array<Record<string, unknown>> = [];
        const cwd = Deno.cwd().replace(/\\/g, "/");
        const entrypointEnv = Deno.env.get("STELLA_DENO_ENTRYPOINT") ?? "";

        type ModuleRef = { normalized: string; path_sha256: string };

        function nowIso(): string {
          return new Date().toISOString();
        }

        function addEvent(evt: Record<string, unknown>) {
          // Deterministic key order via stringify on object literal insertion order.
          events.push(evt);
        }

        function hashPath(input: string): string {
          const data = new TextEncoder().encode(input);
          const hash = crypto.subtle.digestSync("SHA-256", data);
          return Array.from(new Uint8Array(hash))
        function toHex(bytes: Uint8Array): string {
          return Array.from(bytes)
            .map((b) => b.toString(16).padStart(2, "0"))
            .join("");
        }
        function relPath(abs: string): { normalized: string; path_sha256: string } {
          const cwd = Deno.cwd();
          const rel = abs.startsWith(cwd) ? abs.slice(cwd.length + 1) : abs;
          const normalized = rel.replaceAll("\\", "/");
          return { normalized, path_sha256: hashPath(normalized) };
        // Minimal synchronous SHA-256 (no async crypto required)
        function sha256Hex(value: string): string {
          const data = new TextEncoder().encode(value);
          const k = new Uint32Array([
            0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4,
            0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe,
            0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f,
            0x4a7484aa, 0x5cb0a9dc, 0x76f988da, 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
            0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc,
            0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b,
            0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, 0x19a4c116,
            0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
            0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7,
            0xc67178f2,
          ]);

          const h = new Uint32Array([
            0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
            0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
          ]);

          const bitLength = data.length * 8;
          const paddedLength = (((data.length + 9 + 63) >> 6) << 6);
          const buffer = new Uint8Array(paddedLength);
          buffer.set(data);
          buffer[data.length] = 0x80;

          const view = new DataView(buffer.buffer);
          const high = Math.floor(bitLength / 0x100000000);
          const low = bitLength >>> 0;
          view.setUint32(paddedLength - 8, high, false);
          view.setUint32(paddedLength - 4, low, false);

          const w = new Uint32Array(64);
          for (let offset = 0; offset < paddedLength; offset += 64) {
            for (let i = 0; i < 16; i++) {
              w[i] = view.getUint32(offset + i * 4, false);
            }
            for (let i = 16; i < 64; i++) {
              const s0 = rotateRight(w[i - 15], 7) ^ rotateRight(w[i - 15], 18) ^ (w[i - 15] >>> 3);
              const s1 = rotateRight(w[i - 2], 17) ^ rotateRight(w[i - 2], 19) ^ (w[i - 2] >>> 10);
              w[i] = (w[i - 16] + s0 + w[i - 7] + s1) >>> 0;
            }

            let [a, b, c, d, e, f, g, hh] = h;
            for (let i = 0; i < 64; i++) {
              const S1 = rotateRight(e, 6) ^ rotateRight(e, 11) ^ rotateRight(e, 25);
              const ch = (e & f) ^ (~e & g);
              const temp1 = (hh + S1 + ch + k[i] + w[i]) >>> 0;
              const S0 = rotateRight(a, 2) ^ rotateRight(a, 13) ^ rotateRight(a, 22);
              const maj = (a & b) ^ (a & c) ^ (b & c);
              const temp2 = (S0 + maj) >>> 0;

              hh = g;
              g = f;
              f = e;
              e = (d + temp1) >>> 0;
              d = c;
              c = b;
              b = a;
              a = (temp1 + temp2) >>> 0;
            }

            h[0] = (h[0] + a) >>> 0;
            h[1] = (h[1] + b) >>> 0;
            h[2] = (h[2] + c) >>> 0;
            h[3] = (h[3] + d) >>> 0;
            h[4] = (h[4] + e) >>> 0;
            h[5] = (h[5] + f) >>> 0;
            h[6] = (h[6] + g) >>> 0;
            h[7] = (h[7] + hh) >>> 0;
          }

          const out = new Uint8Array(32);
          const viewOut = new DataView(out.buffer);
          for (let i = 0; i < 8; i++) {
            viewOut.setUint32(i * 4, h[i], false);
          }

          return toHex(out);
        }
        // Wrap permission requests
        const originalPermissions = Deno.permissions;
        Deno.permissions = {
          ...originalPermissions,
          request: async (...args: Parameters<typeof originalPermissions.request>) => {
            const res = await originalPermissions.request(...args);
            const name = args[0]?.name ?? "unknown";
            const module = relPath(import.meta.url);
            addEvent({
              type: "deno.permission.use",
              ts: nowIso(),
              permission: name,
              module,
              details: "permissions.request",
            });
            return res;
          },
          query: (...args: Parameters<typeof originalPermissions.query>) =>
            originalPermissions.query(...args),
          revoke: (...args: Parameters<typeof originalPermissions.revoke>) =>
            originalPermissions.revoke(...args),
        };
        function rotateRight(value: number, bits: number): number {
          return ((value >>> bits) | (value << (32 - bits))) >>> 0;
        }

        // Hook dynamic import calls by wrapping import()
        const originalImport = globalThis.import ?? ((specifier: string) => import(specifier));
        globalThis.import = async (specifier: string) => {
          const mod = typeof specifier === "string" ? specifier : String(specifier);
          addEvent({
        function normalizePermission(name: string | undefined): string {
          const normalized = (name ?? "").toLowerCase();
          switch (normalized) {
            case "read":
            case "write":
              return "fs";
            case "net":
              return "net";
            case "env":
              return "env";
            case "ffi":
              return "ffi";
            case "run":
            case "sys":
            case "hrtime":
              return "process";
            case "worker":
              return "worker";
            default:
              return normalized || "unknown";
          }
        }

        const grantedPermissions = new Set<string>();
        const originalPermissions = Deno.permissions;

        async function primePermissionSnapshot() {
          const descriptors: Array<Deno.PermissionDescriptor> = [
            { name: "read" },
            { name: "write" },
            { name: "net" },
            { name: "env" },
            { name: "ffi" },
            { name: "run" as Deno.PermissionName },
            { name: "sys" as Deno.PermissionName },
            { name: "hrtime" as Deno.PermissionName },
          ];

          for (const descriptor of descriptors) {
            try {
              const status = await originalPermissions.query(descriptor as Deno.PermissionDescriptor);
              if (status?.state === "granted") {
                grantedPermissions.add(normalizePermission(descriptor.name as string));
              }
            } catch (_) {
              // ignore permission probes that are unsupported in the current runtime
            }
          }
        }

        function snapshotPermissions(): string[] {
          return Array.from(grantedPermissions).sort();
        }

        function relativePath(path: string): string {
          let candidate = path.replace(/\\/g, "/");

          if (candidate.startsWith("file://")) {
            candidate = candidate.slice("file://".length);
          }

          if (!candidate.startsWith("/") && !/^([A-Za-z]:\\\\|[A-Za-z]:\\/)/.test(candidate)) {
            candidate = `${cwd}/${candidate}`;
          }

          if (candidate.startsWith(cwd)) {
            const offset = cwd.endsWith("/") ? cwd.length : cwd.length + 1;
            candidate = candidate.slice(offset);
          }

          candidate = candidate.replace(/^\.\//, "").replace(/^\/+/, "");
          return candidate.replace(/\\/g, "/") || ".";
        }

        function toFileUrl(path: string): URL {
          const normalized = path.replace(/\\/g, "/");
          if (normalized.startsWith("file://")) {
            return new URL(normalized);
          }

          const absolute = normalized.startsWith("/") || /^([A-Za-z]:\\\\|[A-Za-z]:\\/)/.test(normalized)
            ? normalized
            : `${cwd}/${normalized}`;

          const prefix = absolute.startsWith("/") ? "file://" : "file:///";
          return new URL(prefix + encodeURI(absolute.replace(/#/g, "%23")));
        }

        function normalizeModule(specifier: string): ModuleRef {
          try {
            const url = new URL(specifier);
            if (url.protocol === "file:") {
              const rel = relativePath(decodeURIComponent(url.pathname));
              return { normalized: rel, path_sha256: sha256Hex(rel) };
            }

            if (url.protocol === "http:" || url.protocol === "https:") {
              const normalized = `${url.protocol}//${url.host}${url.pathname}`;
              return { normalized, path_sha256: sha256Hex(normalized) };
            }

            if (url.protocol === "npm:") {
              const normalized = `npm:${url.pathname.replace(/^\//, "")}`;
              return { normalized, path_sha256: sha256Hex(normalized) };
            }
          } catch (_err) {
            // not a URL; treat as path
          }

          const rel = relativePath(specifier);
          return { normalized: rel, path_sha256: sha256Hex(rel) };
        }

        function extractOrigin(specifier: string): string | undefined {
          try {
            const url = new URL(specifier);
            if (url.protocol === "http:" || url.protocol === "https:") {
              return `${url.protocol}//${url.host}${url.pathname}`;
            }
            if (url.protocol === "npm:") {
              return `npm:${url.pathname.replace(/^\//, "")}`;
            }
          } catch (_) {
            return undefined;
          }
          return undefined;
        }

        function addEvent(evt: Record<string, unknown>) {
          events.push(evt);
        }

        function recordModuleLoad(specifier: string, reason: string, permissions?: string[]) {
          const module = normalizeModule(specifier);
          const origin = extractOrigin(specifier);
          const event: Record<string, unknown> = {
            type: "deno.module.load",
            ts: nowIso(),
            module: relPath(mod),
            reason: "dynamic-import",
            permissions: [],
            origin: mod.startsWith("http") ? mod : undefined,
          });
          return originalImport(specifier);
        };
            module,
            reason,
            permissions: permissions ?? snapshotPermissions(),
          };

          if (origin) {
            event.origin = origin;
          }

          addEvent(event);

          if (specifier.startsWith("npm:")) {
            recordNpmResolution(specifier);
          }
        }

        function recordPermissionUse(permission: string, details: string, module?: ModuleRef) {
          const normalizedPermission = normalizePermission(permission);
          if (normalizedPermission && normalizedPermission !== "unknown") {
            grantedPermissions.add(normalizedPermission);
          }

        // Hook WebAssembly loads
        const originalInstantiate = WebAssembly.instantiate;
        WebAssembly.instantiate = async (
          bufferSource: BufferSource | WebAssembly.Module,
          importObject?: WebAssembly.Imports,
        ) => {
          addEvent({
            type: "deno.wasm.load",
            type: "deno.permission.use",
            ts: nowIso(),
            module: relPath("wasm://buffer"),
            importer: relPath(import.meta.url).normalized,
            reason: "instantiate",
            permission: normalizedPermission,
            module: module ?? normalizeModule(entrypointEnv || "shim://runtime"),
            details,
          });
          return originalInstantiate(bufferSource, importObject);
        };
        }
        function recordNpmResolution(specifier: string) {
          const bare = specifier.replace(/^npm:/, "");
          const [pkg, version] = bare.split("@");
          const denoDir = (Deno.env.get("DENO_DIR") ?? "").replace(/\\/g, "/");
          const resolved = denoDir
            ? `file://${denoDir}/npm/registry.npmjs.org/${pkg ?? bare}/${version ?? ""}`
            : `npm:${bare}`;

          // Capture npm resolution hints from env when present
          const npmMeta = Deno.env.get("STELLA_NPM_SPECIFIER");
          if (npmMeta) {
          addEvent({
            type: "deno.npm.resolution",
            ts: nowIso(),
            specifier: npmMeta,
            package: npmMeta,
            version: "",
            resolved: "file://$DENO_DIR/npm",
            specifier: `npm:${bare}`,
            package: pkg ?? bare,
            version: version ?? "",
            resolved,
            exists: true,
          });
        }

        // Write NDJSON on exit
        function flush() {
          const sorted = events.sort((a, b) => {
            const at = String(a.ts);
            const bt = String(b.ts);
            if (at === bt) return String(a.type).localeCompare(String(b.type));
            return at.localeCompare(bt);
        function recordWasmLoad(moduleSpecifier: string, importer: string, reason: string) {
          addEvent({
            type: "deno.wasm.load",
            ts: nowIso(),
            module: normalizeModule(moduleSpecifier),
            importer,
            reason,
          });
          const data = sorted.map((e) => JSON.stringify(e)).join("\\n") + "\\n";
          Deno.writeTextFileSync("deno-runtime.ndjson", data);
        }

        addEvent({
          type: "deno.runtime.start",
          ts: nowIso(),
          module: relPath(import.meta.url),
          reason: "shim-start",
        });
        function hookModuleLoader(): boolean {
          try {
            const internal = (Deno as unknown as Record<string, unknown>)[Symbol.for("Deno.internal") as unknown as string]
              ?? (Deno as unknown as Record<string, unknown>).internal;
            const loader = (internal as Record<string, unknown>)?.moduleLoader as Record<string, unknown> | undefined;
            if (!loader || typeof loader.load !== "function") {
              return false;
            }

        globalThis.addEventListener("unload", () => {
          flush();
        });
            const originalLoad = loader.load.bind(loader) as (...args: unknown[]) => Promise<unknown>;
            loader.load = async (...args: unknown[]) => {
              const specifier = String(args[0] ?? "");
              const isDynamic = Boolean(args[2]);
              const reason = specifier.startsWith("npm:") ? "npm" : isDynamic ? "dynamic-import" : "static-import";
              recordModuleLoad(specifier, reason);
              return await originalLoad(...args);
            };

            return true;
          } catch (err) {
            addEvent({ type: "deno.runtime.error", ts: nowIso(), message: String(err?.message ?? err) });
            return false;
          }
        }

        function wrapPermissions(entryModule: ModuleRef) {
          Deno.permissions = {
            ...originalPermissions,
            request: async (...args: Parameters<typeof originalPermissions.request>) => {
              const status = await originalPermissions.request(...args);
              recordPermissionUse(args[0]?.name ?? "unknown", "permissions.request", entryModule);
              if (status?.state === "granted") {
                grantedPermissions.add(normalizePermission(args[0]?.name));
              }
              return status;
            },
            query: async (...args: Parameters<typeof originalPermissions.query>) => {
              const status = await originalPermissions.query(...args);
              if (status?.state === "granted") {
                grantedPermissions.add(normalizePermission(args[0]?.name));
              }
              return status;
            },
            revoke: async (...args: Parameters<typeof originalPermissions.revoke>) => {
              const status = await originalPermissions.revoke(...args);
              grantedPermissions.delete(normalizePermission(args[0]?.name));
              return status;
            },
          } as typeof Deno.permissions;
        }

        function wrapDlopen(entryModule: ModuleRef) {
          const original = (Deno as unknown as Record<string, unknown>).dlopen as
            | ((path: string | URL, symbols: Record<string, string | Deno.ForeignFunctionDefinition>) => unknown)
            | undefined;

          if (typeof original !== "function") {
            return;
          }

          (Deno as unknown as Record<string, unknown>).dlopen = (path: string | URL, symbols: Record<string, string | Deno.ForeignFunctionDefinition>) => {
            recordPermissionUse("ffi", "Deno.dlopen", entryModule);
            return original(path, symbols);
          };
        }

        function wrapWasm(importer: ModuleRef) {
          const originalInstantiate = WebAssembly.instantiate;
          WebAssembly.instantiate = async (
            bufferSource: BufferSource | WebAssembly.Module,
            importObject?: WebAssembly.Imports,
          ) => {
            recordWasmLoad("wasm://buffer", importer.normalized, "instantiate");
            return await originalInstantiate(bufferSource, importObject);
          };

          const originalInstantiateStreaming = WebAssembly.instantiateStreaming;
          if (originalInstantiateStreaming) {
            WebAssembly.instantiateStreaming = async (
              source: Response | Promise<Response>,
              importObject?: WebAssembly.Imports,
            ) => {
              try {
                const response = await source;
                const url = response?.url || "wasm://stream";
                recordWasmLoad(url, importer.normalized, "instantiateStreaming");
              } catch (_) {
                recordWasmLoad("wasm://stream", importer.normalized, "instantiateStreaming");
              }
              return await originalInstantiateStreaming(source as Response, importObject);
            };
          }
        }
        function flush() {
          try {
            const sorted = events.sort((a, b) => {
              const at = String(a.ts);
              const bt = String(b.ts);
              if (at === bt) return String(a.type).localeCompare(String(b.type));
              return at.localeCompare(bt);
            });

            const data = sorted.map((e) => JSON.stringify(e)).join("\n");
            Deno.writeTextFileSync("deno-runtime.ndjson", data ? `${data}\n` : "");
          } catch (err) {
            // last-resort logging; avoid throwing
            console.error("deno-runtime shim failed to write trace", err);
          }
        }
        async function main() {
          if (!entrypointEnv) {
            addEvent({ type: "deno.runtime.error", ts: nowIso(), message: "STELLA_DENO_ENTRYPOINT missing" });
            flush();
            return;
          }

          const entryUrl = toFileUrl(entrypointEnv);
          const entryModule = normalizeModule(entryUrl.href);

          addEvent({
            type: "deno.runtime.start",
            ts: nowIso(),
            module: entryModule,
            reason: "shim-start",
          });

          await primePermissionSnapshot();
          const loaderHooked = hookModuleLoader();
          wrapPermissions(entryModule);
          wrapDlopen(entryModule);
          wrapWasm(entryModule);

          if (!loaderHooked) {
            recordModuleLoad(entryUrl.href, "static-import", snapshotPermissions());
          }

          try {
            await import(entryUrl.href);
          } catch (err) {
            addEvent({ type: "deno.runtime.error", ts: nowIso(), message: String(err?.message ?? err) });
          } finally {
            flush();
          }
        }

        globalThis.addEventListener("unload", flush);
        await main();
        """;

}
@@ -0,0 +1,11 @@
global using System;
global using System.Collections.Generic;
global using System.Globalization;
global using System.IO;
global using System.Linq;
global using System.Security.Cryptography;
global using System.Text.Json;
global using System.Threading;
global using System.Threading.Tasks;

global using StellaOps.Scanner.Analyzers.Lang;
@@ -0,0 +1,48 @@
namespace StellaOps.Scanner.Analyzers.Lang.Php.Internal;

internal sealed class ComposerLockData
{
    public ComposerLockData(
        string lockPath,
        string? contentHash,
        string? pluginApiVersion,
        IReadOnlyList<ComposerPackage> packages,
        IReadOnlyList<ComposerPackage> devPackages,
        string? lockSha256)
    {
        LockPath = lockPath ?? string.Empty;
        ContentHash = contentHash;
        PluginApiVersion = pluginApiVersion;
        Packages = packages ?? Array.Empty<ComposerPackage>();
        DevPackages = devPackages ?? Array.Empty<ComposerPackage>();
        LockSha256 = lockSha256;
    }

    public string LockPath { get; }

    public string? ContentHash { get; }

    public string? PluginApiVersion { get; }

    public IReadOnlyList<ComposerPackage> Packages { get; }

    public IReadOnlyList<ComposerPackage> DevPackages { get; }

    public string? LockSha256 { get; }

    public bool IsEmpty => Packages.Count == 0 && DevPackages.Count == 0;

    public static ComposerLockData Empty { get; } = new(
        lockPath: string.Empty,
        contentHash: null,
        pluginApiVersion: null,
        packages: Array.Empty<ComposerPackage>(),
        devPackages: Array.Empty<ComposerPackage>(),
        lockSha256: null);

    public static ValueTask<ComposerLockData> LoadAsync(LanguageAnalyzerContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        return ComposerLockReader.LoadAsync(context, cancellationToken);
    }
}
@@ -0,0 +1,122 @@
using System.Security.Cryptography;
using System.Text.Json;

namespace StellaOps.Scanner.Analyzers.Lang.Php.Internal;

internal static class ComposerLockReader
{
    private const string LockFileName = "composer.lock";

    public static async ValueTask<ComposerLockData> LoadAsync(LanguageAnalyzerContext context, CancellationToken cancellationToken)
    {
        var lockPath = Path.Combine(context.RootPath, LockFileName);
        if (!File.Exists(lockPath))
        {
            return ComposerLockData.Empty;
        }

        await using var stream = File.Open(lockPath, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false);
        var root = document.RootElement;

        var contentHash = TryGetString(root, "content-hash");
        var pluginApiVersion = TryGetString(root, "plugin-api-version");

        var packages = ParsePackages(root, propertyName: "packages", isDev: false);
        var devPackages = ParsePackages(root, propertyName: "packages-dev", isDev: true);
        var lockSha = await ComputeSha256Async(lockPath, cancellationToken).ConfigureAwait(false);

        return new ComposerLockData(
            lockPath,
            contentHash,
            pluginApiVersion,
            packages,
            devPackages,
            lockSha);
    }

    private static IReadOnlyList<ComposerPackage> ParsePackages(JsonElement root, string propertyName, bool isDev)
    {
        if (!root.TryGetProperty(propertyName, out var packagesElement) || packagesElement.ValueKind != JsonValueKind.Array)
        {
            return Array.Empty<ComposerPackage>();
        }

        var packages = new List<ComposerPackage>();
        foreach (var packageElement in packagesElement.EnumerateArray())
        {
            if (!TryGetString(packageElement, "name", out var name)
                || !TryGetString(packageElement, "version", out var version))
            {
                continue;
            }

            var type = TryGetString(packageElement, "type");
            var (sourceType, sourceReference) = ParseSource(packageElement);
            var (distSha, distUrl) = ParseDist(packageElement);

            packages.Add(new ComposerPackage(
                name,
                version,
                type,
                isDev,
                sourceType,
                sourceReference,
                distSha,
                distUrl));
        }

        return packages;
    }

    private static (string? SourceType, string? SourceReference) ParseSource(JsonElement packageElement)
    {
        if (!packageElement.TryGetProperty("source", out var sourceElement) || sourceElement.ValueKind != JsonValueKind.Object)
        {
            return (null, null);
        }

        var sourceType = TryGetString(sourceElement, "type");
        var sourceReference = TryGetString(sourceElement, "reference");
        return (sourceType, sourceReference);
    }

    private static (string? DistSha, string? DistUrl) ParseDist(JsonElement packageElement)
    {
        if (!packageElement.TryGetProperty("dist", out var distElement) || distElement.ValueKind != JsonValueKind.Object)
        {
            return (null, null);
        }

        var distUrl = TryGetString(distElement, "url");
        var distSha = TryGetString(distElement, "shasum") ?? TryGetString(distElement, "checksum");
        return (distSha, distUrl);
    }

    private static string? TryGetString(JsonElement element, string propertyName)
        => TryGetString(element, propertyName, out var value) ? value : null;

    private static bool TryGetString(JsonElement element, string propertyName, out string? value)
    {
        value = null;
        if (!element.TryGetProperty(propertyName, out var property))
        {
            return false;
        }

        if (property.ValueKind == JsonValueKind.String)
        {
            value = property.GetString();
            return true;
        }

        return false;
    }

    private static async ValueTask<string> ComputeSha256Async(string path, CancellationToken cancellationToken)
    {
        await using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read);
        var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
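To make the reader concrete, this is the minimal composer.lock shape it accepts, written as the kind of raw-string fixture the test project could embed (all values illustrative; the field names mirror the TryGetString calls above):

    private const string SampleComposerLock = """
    {
      "content-hash": "0123456789abcdef0123456789abcdef",
      "plugin-api-version": "2.6.0",
      "packages": [
        {
          "name": "monolog/monolog",
          "version": "3.5.0",
          "type": "library",
          "source": { "type": "git", "reference": "abc123" },
          "dist": { "url": "https://example.org/monolog-3.5.0.zip", "shasum": "deadbeef" }
        }
      ],
      "packages-dev": []
    }
    """;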
@@ -0,0 +1,11 @@
namespace StellaOps.Scanner.Analyzers.Lang.Php.Internal;

internal sealed record ComposerPackage(
    string Name,
    string Version,
    string? Type,
    bool IsDev,
    string? SourceType,
    string? SourceReference,
    string? DistSha256,
    string? DistUrl);
@@ -0,0 +1,32 @@
namespace StellaOps.Scanner.Analyzers.Lang.Php.Internal;

internal static class PhpCapabilitySignals
{
    private static readonly (string Package, string Key, string Value)[] KnownSignals =
    {
        ("laravel/framework", "php.capability.framework", "laravel"),
        ("symfony/symfony", "php.capability.framework", "symfony"),
        ("drupal/core", "php.capability.cms", "drupal"),
        ("wordpress/wordpress", "php.capability.cms", "wordpress"),
        ("magento/product-community-edition", "php.capability.cms", "magento"),
        ("cakephp/cakephp", "php.capability.framework", "cakephp"),
        ("slim/slim", "php.capability.framework", "slim"),
        ("codeigniter4/framework", "php.capability.framework", "codeigniter"),
        ("laminas/laminas-mvc", "php.capability.framework", "laminas"),
        ("phpunit/phpunit", "php.capability.test", "phpunit"),
        ("behat/behat", "php.capability.test", "behat")
    };

    public static IEnumerable<KeyValuePair<string, string?>> FromPackage(ComposerPackage package)
    {
        ArgumentNullException.ThrowIfNull(package);

        foreach (var (packageName, key, value) in KnownSignals)
        {
            if (package.Name.Equals(packageName, StringComparison.OrdinalIgnoreCase))
            {
                yield return new KeyValuePair<string, string?>(key, value);
            }
        }
    }
}
@@ -0,0 +1,83 @@
namespace StellaOps.Scanner.Analyzers.Lang.Php.Internal;

internal sealed class PhpPackage
{
    private readonly ComposerPackage _package;
    private readonly ComposerLockData _lockData;

    public PhpPackage(ComposerPackage package, ComposerLockData lockData)
    {
        _package = package ?? throw new ArgumentNullException(nameof(package));
        _lockData = lockData ?? throw new ArgumentNullException(nameof(lockData));
    }

    public string Name => _package.Name;

    public string Version => _package.Version;

    public string Purl => $"pkg:composer/{Name}@{Version}";

    public string ComponentKey => $"purl::{Purl}";

    public IEnumerable<KeyValuePair<string, string?>> CreateMetadata()
    {
        yield return new KeyValuePair<string, string?>("composer.dev", _package.IsDev ? "true" : "false");

        if (!string.IsNullOrWhiteSpace(_package.Type))
        {
            yield return new KeyValuePair<string, string?>("composer.type", _package.Type);
        }

        if (!string.IsNullOrWhiteSpace(_package.SourceType))
        {
            yield return new KeyValuePair<string, string?>("composer.source.type", _package.SourceType);
        }

        if (!string.IsNullOrWhiteSpace(_package.SourceReference))
        {
            yield return new KeyValuePair<string, string?>("composer.source.ref", _package.SourceReference);
        }

        if (!string.IsNullOrWhiteSpace(_package.DistSha256))
        {
            yield return new KeyValuePair<string, string?>("composer.dist.sha256", _package.DistSha256);
        }

        if (!string.IsNullOrWhiteSpace(_package.DistUrl))
        {
            yield return new KeyValuePair<string, string?>("composer.dist.url", _package.DistUrl);
        }

        if (!string.IsNullOrWhiteSpace(_lockData.PluginApiVersion))
        {
            yield return new KeyValuePair<string, string?>("composer.plugin_api_version", _lockData.PluginApiVersion);
        }

        if (!string.IsNullOrWhiteSpace(_lockData.ContentHash))
        {
            yield return new KeyValuePair<string, string?>("composer.content_hash", _lockData.ContentHash);
        }

        foreach (var signal in PhpCapabilitySignals.FromPackage(_package))
        {
            yield return signal;
        }
    }

    public IReadOnlyCollection<LanguageComponentEvidence> CreateEvidence()
    {
        var locator = string.IsNullOrWhiteSpace(_lockData.LockPath)
            ? "composer.lock"
            : Path.GetFileName(_lockData.LockPath);

        return new[]
        {
            new LanguageComponentEvidence(
                LanguageEvidenceKind.File,
                "composer.lock",
                locator,
                Value: $"{Name}@{Version}",
                Sha256: _lockData.LockSha256)
        };
    }
}
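A quick illustration of the identity derivation above (hypothetical values; note the vendor/name slash is preserved inside the purl):

    var package = new ComposerPackage(
        Name: "monolog/monolog",
        Version: "3.5.0",
        Type: "library",
        IsDev: false,
        SourceType: "git",
        SourceReference: "abc123",
        DistSha256: "deadbeef",
        DistUrl: null);

    var phpPackage = new PhpPackage(package, ComposerLockData.Empty);
    // phpPackage.Purl         == "pkg:composer/monolog/monolog@3.5.0"
    // phpPackage.ComponentKey == "purl::pkg:composer/monolog/monolog@3.5.0"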
@@ -0,0 +1,27 @@
namespace StellaOps.Scanner.Analyzers.Lang.Php.Internal;

internal static class PhpPackageCollector
{
    public static IReadOnlyList<PhpPackage> Collect(ComposerLockData lockData)
    {
        ArgumentNullException.ThrowIfNull(lockData);

        if (lockData.IsEmpty)
        {
            return Array.Empty<PhpPackage>();
        }

        var packages = new List<PhpPackage>(lockData.Packages.Count + lockData.DevPackages.Count);
        foreach (var package in lockData.Packages)
        {
            packages.Add(new PhpPackage(package, lockData));
        }

        foreach (var package in lockData.DevPackages)
        {
            packages.Add(new PhpPackage(package, lockData));
        }

        return packages;
    }
}
@@ -0,0 +1,16 @@
using StellaOps.Scanner.Analyzers.Lang.Plugin;

namespace StellaOps.Scanner.Analyzers.Lang.Php;

public sealed class PhpAnalyzerPlugin : ILanguageAnalyzerPlugin
{
    public string Name => "StellaOps.Scanner.Analyzers.Lang.Php";

    public bool IsAvailable(IServiceProvider services) => services is not null;

    public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        return new PhpLanguageAnalyzer();
    }
}
@@ -0,0 +1,38 @@
using StellaOps.Scanner.Analyzers.Lang.Php.Internal;

namespace StellaOps.Scanner.Analyzers.Lang.Php;

public sealed class PhpLanguageAnalyzer : ILanguageAnalyzer
{
    public string Id => "php";

    public string DisplayName => "PHP Analyzer";

    public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(writer);

        var lockData = await ComposerLockData.LoadAsync(context, cancellationToken).ConfigureAwait(false);
        var packages = PhpPackageCollector.Collect(lockData);
        if (packages.Count == 0)
        {
            return;
        }

        foreach (var package in packages.OrderBy(static p => p.ComponentKey, StringComparer.Ordinal))
        {
            cancellationToken.ThrowIfCancellationRequested();

            writer.AddFromPurl(
                analyzerId: Id,
                purl: package.Purl,
                name: package.Name,
                version: package.Version,
                type: "composer",
                metadata: package.CreateMetadata(),
                evidence: package.CreateEvidence(),
                usedByEntrypoint: false);
        }
    }
}
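The ordinal sort on ComponentKey above is what makes the analyzer's output byte-stable across runs; a minimal illustration of the guarantee:

    var keys = new[] { "purl::pkg:composer/b/b@1.0.0", "purl::pkg:composer/a/a@1.0.0" };
    var ordered = keys.OrderBy(static k => k, StringComparer.Ordinal).ToArray();
    // Always ["...a/a@1.0.0", "...b/b@1.0.0"], regardless of composer.lock order.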
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <EnableDefaultItems>false</EnableDefaultItems>
  </PropertyGroup>

  <ItemGroup>
    <Compile Include="**\*.cs" Exclude="obj\**;bin\**" />
    <EmbeddedResource Include="**\*.json" Exclude="obj\**;bin\**" />
    <None Include="**\*" Exclude="**\*.cs;**\*.json;bin\**;obj\**" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" />
  </ItemGroup>
</Project>
Some files were not shown because too many files have changed in this diff.