feat(metrics): Implement scan metrics repository and PostgreSQL integration
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Added IScanMetricsRepository interface for scan metrics persistence and retrieval. - Implemented PostgresScanMetricsRepository for PostgreSQL database interactions, including methods for saving and retrieving scan metrics and execution phases. - Introduced methods for obtaining TTE statistics and recent scans for tenants. - Implemented deletion of old metrics for retention purposes. test(tests): Add SCA Failure Catalogue tests for FC6-FC10 - Created ScaCatalogueDeterminismTests to validate determinism properties of SCA Failure Catalogue fixtures. - Developed ScaFailureCatalogueTests to ensure correct handling of specific failure modes in the scanner. - Included tests for manifest validation, file existence, and expected findings across multiple failure cases. feat(telemetry): Integrate scan completion metrics into the pipeline - Introduced IScanCompletionMetricsIntegration interface and ScanCompletionMetricsIntegration class to record metrics upon scan completion. - Implemented proof coverage and TTE metrics recording with logging for scan completion summaries.
This commit is contained in:
@@ -0,0 +1,173 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScanMetricsModels.cs
|
||||
// Sprint: SPRINT_3406_0001_0001_metrics_tables
|
||||
// Task: METRICS-3406-005
|
||||
// Description: Entity definitions for scan metrics and TTE tracking
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Models;
|
||||
|
||||
/// <summary>
/// Per-scan metrics for TTE (Time-to-Evidence) tracking.
/// One instance maps to one row of the scanner.scan_metrics table;
/// <see cref="ScanId"/> is unique per scan.
/// </summary>
public sealed record ScanMetrics
{
    /// <summary>Primary key; Guid.Empty until the repository assigns one on save.</summary>
    public Guid MetricsId { get; init; }

    /// <summary>Scan this metrics row describes (unique per row).</summary>
    public required Guid ScanId { get; init; }

    /// <summary>Owning tenant.</summary>
    public required Guid TenantId { get; init; }

    /// <summary>Optional surface reference; null when the scan is not surface-scoped.</summary>
    public Guid? SurfaceId { get; init; }

    // Artifact identification
    /// <summary>Digest of the scanned artifact.</summary>
    public required string ArtifactDigest { get; init; }

    /// <summary>One of the <see cref="ArtifactTypes"/> constants (e.g. "oci_image").</summary>
    public required string ArtifactType { get; init; }

    // Reference to replay manifest
    /// <summary>Hash of the replay manifest stored in the document store, if any.</summary>
    public string? ReplayManifestHash { get; init; }

    // Digest tracking
    /// <summary>SHA-256 of the findings output; required for determinism checks.</summary>
    public required string FindingsSha256 { get; init; }

    public string? VexBundleSha256 { get; init; }

    public string? ProofBundleSha256 { get; init; }

    public string? SbomSha256 { get; init; }

    // Policy reference
    public string? PolicyDigest { get; init; }

    public string? FeedSnapshotId { get; init; }

    // Timing
    /// <summary>Scan start timestamp.</summary>
    public required DateTimeOffset StartedAt { get; init; }

    /// <summary>Scan finish timestamp; expected to be >= <see cref="StartedAt"/>.</summary>
    public required DateTimeOffset FinishedAt { get; init; }

    /// <summary>
    /// Time-to-Evidence in milliseconds (FinishedAt - StartedAt, truncated to int).
    /// Negative when FinishedAt precedes StartedAt.
    /// </summary>
    public int TotalDurationMs => (int)(FinishedAt - StartedAt).TotalMilliseconds;

    // Phase timings
    /// <summary>Per-phase millisecond breakdown; use <see cref="ScanPhaseTimings.Empty"/> when unknown.</summary>
    public required ScanPhaseTimings Phases { get; init; }

    // Artifact counts (null = not collected for this scan)
    public int? PackageCount { get; init; }

    public int? FindingCount { get; init; }

    public int? VexDecisionCount { get; init; }

    // Scanner metadata
    public required string ScannerVersion { get; init; }

    public string? ScannerImageDigest { get; init; }

    // Replay mode
    /// <summary>True when this scan was a deterministic replay; replays are excluded from TTE stats.</summary>
    public bool IsReplay { get; init; }

    /// <summary>Row creation time; defaults to now (UTC) at construction.</summary>
    public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;
}
|
||||
|
||||
/// <summary>
/// Millisecond timing breakdown for each pipeline phase of a scan.
/// </summary>
public sealed record ScanPhaseTimings
{
    public required int IngestMs { get; init; }
    public required int AnalyzeMs { get; init; }
    public required int ReachabilityMs { get; init; }
    public required int VexMs { get; init; }
    public required int SignMs { get; init; }
    public required int PublishMs { get; init; }

    /// <summary>
    /// Total milliseconds across all tracked phases.
    /// </summary>
    public int TotalMs
    {
        get
        {
            var total = IngestMs + AnalyzeMs;
            total += ReachabilityMs + VexMs;
            total += SignMs + PublishMs;
            return total;
        }
    }

    /// <summary>
    /// A timing record with every phase set to zero.
    /// </summary>
    public static ScanPhaseTimings Empty => new()
    {
        IngestMs = 0,
        AnalyzeMs = 0,
        ReachabilityMs = 0,
        VexMs = 0,
        SignMs = 0,
        PublishMs = 0,
    };
}
|
||||
|
||||
/// <summary>
/// Detailed phase execution record; maps to one row of scanner.execution_phases.
/// </summary>
public sealed record ExecutionPhase
{
    /// <summary>Database-assigned surrogate key (BIGSERIAL); 0 until persisted.</summary>
    public long Id { get; init; }

    /// <summary>Parent scan_metrics row this phase belongs to.</summary>
    public required Guid MetricsId { get; init; }

    /// <summary>One of the <see cref="ScanPhaseNames"/> constants.</summary>
    public required string PhaseName { get; init; }

    /// <summary>Pipeline position (see <see cref="ScanPhaseNames.GetPhaseOrder"/>).</summary>
    public required int PhaseOrder { get; init; }

    public required DateTimeOffset StartedAt { get; init; }

    public required DateTimeOffset FinishedAt { get; init; }

    /// <summary>Phase duration in milliseconds (FinishedAt - StartedAt, truncated to int).</summary>
    public int DurationMs => (int)(FinishedAt - StartedAt).TotalMilliseconds;

    /// <summary>Whether the phase completed without error.</summary>
    public required bool Success { get; init; }

    /// <summary>Machine-readable error identifier when <see cref="Success"/> is false.</summary>
    public string? ErrorCode { get; init; }

    public string? ErrorMessage { get; init; }

    // Free-form phase-specific metrics; serialized to the phase_metrics JSONB column.
    public IReadOnlyDictionary<string, object>? PhaseMetrics { get; init; }
}
|
||||
|
||||
/// <summary>
/// TTE statistics for a one-hour bucket; maps to one row of the
/// scanner.tte_stats view (replay scans excluded).
/// </summary>
public sealed record TteStats
{
    public required Guid TenantId { get; init; }

    /// <summary>Start of the hour this row aggregates (date_trunc('hour', started_at)).</summary>
    public required DateTimeOffset HourBucket { get; init; }

    /// <summary>Number of (non-replay) scans in the bucket.</summary>
    public required int ScanCount { get; init; }

    // TTE statistics, all in milliseconds.
    public required int TteAvgMs { get; init; }

    public required int TteP50Ms { get; init; }

    public required int TteP95Ms { get; init; }

    public required int TteMaxMs { get; init; }

    /// <summary>Percentage of scans under the 120s P50 SLO threshold (0-100, 2 decimals).</summary>
    public required decimal SloP50CompliancePercent { get; init; }

    /// <summary>Percentage of scans under the 300s P95 SLO threshold (0-100, 2 decimals).</summary>
    public required decimal SloP95CompliancePercent { get; init; }
}
|
||||
|
||||
/// <summary>
/// Canonical scan phase names used in metrics records; values match the
/// valid_phase_name CHECK constraint on scanner.execution_phases.
/// </summary>
public static class ScanPhaseNames
{
    public const string Ingest = "ingest";
    public const string Analyze = "analyze";
    public const string Reachability = "reachability";
    public const string Vex = "vex";
    public const string Sign = "sign";
    public const string Publish = "publish";
    public const string Other = "other";

    /// <summary>
    /// The six pipeline phases in execution order (<see cref="Other"/> is not listed).
    /// </summary>
    public static readonly IReadOnlyList<string> All = new[]
    {
        Ingest,
        Analyze,
        Reachability,
        Vex,
        Sign,
        Publish,
    };

    /// <summary>
    /// Maps a phase name to its 1-based pipeline position; any unrecognized
    /// name maps to 99 so it sorts after the known phases.
    /// </summary>
    public static int GetPhaseOrder(string phaseName)
    {
        switch (phaseName)
        {
            case Ingest:
                return 1;
            case Analyze:
                return 2;
            case Reachability:
                return 3;
            case Vex:
                return 4;
            case Sign:
                return 5;
            case Publish:
                return 6;
            default:
                return 99;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Artifact type constants for <see cref="ScanMetrics.ArtifactType"/>;
/// values must stay in sync with the valid_artifact_type CHECK constraint
/// on scanner.scan_metrics.
/// </summary>
public static class ArtifactTypes
{
    public const string OciImage = "oci_image";

    public const string Tarball = "tarball";

    public const string Directory = "directory";

    public const string Other = "other";
}
|
||||
@@ -0,0 +1,208 @@
|
||||
-- Migration: 004_scan_metrics
-- Sprint: SPRINT_3406_0001_0001_metrics_tables
-- Task: METRICS-3406-001, METRICS-3406-002, METRICS-3406-003, METRICS-3406-004
-- Description: Scan metrics tables for TTE tracking and performance analysis

-- Create scanner schema if not exists
CREATE SCHEMA IF NOT EXISTS scanner;

-- =============================================================================
-- Task METRICS-3406-001: scan_metrics Table
-- One row per scan (scan_id is UNIQUE); the repository upserts on scan_id.
-- =============================================================================
CREATE TABLE IF NOT EXISTS scanner.scan_metrics (
    metrics_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    -- Scan identification
    scan_id UUID NOT NULL UNIQUE,
    tenant_id UUID NOT NULL,
    surface_id UUID,

    -- Artifact identification
    artifact_digest TEXT NOT NULL,
    artifact_type TEXT NOT NULL, -- 'oci_image', 'tarball', 'directory'

    -- Reference to replay manifest (in document store)
    replay_manifest_hash TEXT,

    -- Digest tracking for determinism
    findings_sha256 TEXT NOT NULL,
    vex_bundle_sha256 TEXT,
    proof_bundle_sha256 TEXT,
    sbom_sha256 TEXT,

    -- Policy reference
    policy_digest TEXT,
    feed_snapshot_id TEXT,

    -- Overall timing
    started_at TIMESTAMPTZ NOT NULL,
    finished_at TIMESTAMPTZ NOT NULL,
    -- Generated TTE column; EXTRACT yields numeric, assignment-cast to INT.
    -- NOTE(review): no CHECK enforces finished_at >= started_at, so this can
    -- go negative on bad input — confirm upstream validation.
    total_duration_ms INT NOT NULL GENERATED ALWAYS AS (
        EXTRACT(EPOCH FROM (finished_at - started_at)) * 1000
    ) STORED,

    -- Phase timings (milliseconds)
    t_ingest_ms INT NOT NULL DEFAULT 0,
    t_analyze_ms INT NOT NULL DEFAULT 0,
    t_reachability_ms INT NOT NULL DEFAULT 0,
    t_vex_ms INT NOT NULL DEFAULT 0,
    t_sign_ms INT NOT NULL DEFAULT 0,
    t_publish_ms INT NOT NULL DEFAULT 0,

    -- Artifact counts
    package_count INT,
    finding_count INT,
    vex_decision_count INT,

    -- Scanner metadata
    scanner_version TEXT NOT NULL,
    scanner_image_digest TEXT,

    -- Replay mode flag
    is_replay BOOLEAN NOT NULL DEFAULT FALSE,

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Constraints
    CONSTRAINT valid_timings CHECK (
        t_ingest_ms >= 0 AND t_analyze_ms >= 0 AND t_reachability_ms >= 0 AND
        t_vex_ms >= 0 AND t_sign_ms >= 0 AND t_publish_ms >= 0
    ),
    -- Keep in sync with the ArtifactTypes constants in the C# model.
    CONSTRAINT valid_artifact_type CHECK (artifact_type IN ('oci_image', 'tarball', 'directory', 'other'))
);

COMMENT ON TABLE scanner.scan_metrics IS 'Per-scan metrics for TTE analysis and performance tracking';
COMMENT ON COLUMN scanner.scan_metrics.total_duration_ms IS 'Time-to-Evidence in milliseconds';
|
||||
|
||||
-- =============================================================================
-- Task METRICS-3406-002: execution_phases Table
-- Zero or more rows per scan_metrics row; rows cascade-delete with the parent.
-- =============================================================================
CREATE TABLE IF NOT EXISTS scanner.execution_phases (
    id BIGSERIAL PRIMARY KEY,
    metrics_id UUID NOT NULL REFERENCES scanner.scan_metrics(metrics_id) ON DELETE CASCADE,

    -- Phase identification
    phase_name TEXT NOT NULL,
    phase_order INT NOT NULL,

    -- Timing
    started_at TIMESTAMPTZ NOT NULL,
    finished_at TIMESTAMPTZ NOT NULL,
    -- Generated duration; EXTRACT yields numeric, assignment-cast to INT.
    duration_ms INT NOT NULL GENERATED ALWAYS AS (
        EXTRACT(EPOCH FROM (finished_at - started_at)) * 1000
    ) STORED,

    -- Status
    success BOOLEAN NOT NULL,
    error_code TEXT,
    error_message TEXT,

    -- Phase-specific metrics (JSONB for flexibility)
    phase_metrics JSONB,

    -- Constraints
    -- Keep in sync with the ScanPhaseNames constants in the C# model.
    CONSTRAINT valid_phase_name CHECK (phase_name IN (
        'ingest', 'analyze', 'reachability', 'vex', 'sign', 'publish', 'other'
    ))
);

COMMENT ON TABLE scanner.execution_phases IS 'Granular phase-level execution details';
|
||||
|
||||
-- =============================================================================
-- Task METRICS-3406-004: Indexes
-- Single-column indexes for common filters, plus a composite (tenant_id,
-- started_at) index backing the "recent scans for tenant" query path.
-- =============================================================================
CREATE INDEX IF NOT EXISTS idx_scan_metrics_tenant ON scanner.scan_metrics(tenant_id);
CREATE INDEX IF NOT EXISTS idx_scan_metrics_artifact ON scanner.scan_metrics(artifact_digest);
CREATE INDEX IF NOT EXISTS idx_scan_metrics_started ON scanner.scan_metrics(started_at);
CREATE INDEX IF NOT EXISTS idx_scan_metrics_surface ON scanner.scan_metrics(surface_id);
-- NOTE(review): is_replay is a low-cardinality boolean; a partial index
-- (WHERE NOT is_replay) may serve the same queries more cheaply — confirm usage.
CREATE INDEX IF NOT EXISTS idx_scan_metrics_replay ON scanner.scan_metrics(is_replay);
CREATE INDEX IF NOT EXISTS idx_scan_metrics_tenant_started ON scanner.scan_metrics(tenant_id, started_at);
CREATE INDEX IF NOT EXISTS idx_execution_phases_metrics ON scanner.execution_phases(metrics_id);
CREATE INDEX IF NOT EXISTS idx_execution_phases_name ON scanner.execution_phases(phase_name);
|
||||
|
||||
-- =============================================================================
-- Task METRICS-3406-003: scan_tte View
-- Read-only projection of scan_metrics exposing TTE plus per-phase shares.
-- Percentages are NULL when total_duration_ms is 0 (NULLIF guard).
-- =============================================================================
CREATE OR REPLACE VIEW scanner.scan_tte AS
SELECT
    metrics_id,
    scan_id,
    tenant_id,
    surface_id,
    artifact_digest,

    -- TTE calculation
    total_duration_ms AS tte_ms,
    (total_duration_ms / 1000.0) AS tte_seconds,
    (finished_at - started_at) AS tte_interval,

    -- Phase breakdown
    t_ingest_ms,
    t_analyze_ms,
    t_reachability_ms,
    t_vex_ms,
    t_sign_ms,
    t_publish_ms,

    -- Phase percentages (share of total duration, 2 decimals)
    ROUND((t_ingest_ms::numeric / NULLIF(total_duration_ms, 0)) * 100, 2) AS ingest_percent,
    ROUND((t_analyze_ms::numeric / NULLIF(total_duration_ms, 0)) * 100, 2) AS analyze_percent,
    ROUND((t_reachability_ms::numeric / NULLIF(total_duration_ms, 0)) * 100, 2) AS reachability_percent,
    ROUND((t_vex_ms::numeric / NULLIF(total_duration_ms, 0)) * 100, 2) AS vex_percent,
    ROUND((t_sign_ms::numeric / NULLIF(total_duration_ms, 0)) * 100, 2) AS sign_percent,
    ROUND((t_publish_ms::numeric / NULLIF(total_duration_ms, 0)) * 100, 2) AS publish_percent,

    -- Metadata
    package_count,
    finding_count,
    is_replay,
    scanner_version,
    started_at,
    finished_at

FROM scanner.scan_metrics;

COMMENT ON VIEW scanner.scan_tte IS 'Time-to-Evidence metrics per scan';
|
||||
|
||||
-- TTE percentile calculation function.
-- Returns the continuous percentile (p_percentile in [0,1]) of tte_ms for a
-- tenant's non-replay scans since p_since (default: last 7 days).
-- Returns NULL when no matching scans exist.
CREATE OR REPLACE FUNCTION scanner.tte_percentile(
    p_tenant_id UUID,
    p_percentile NUMERIC,
    p_since TIMESTAMPTZ DEFAULT (NOW() - INTERVAL '7 days')
)
RETURNS NUMERIC AS $$
    SELECT PERCENTILE_CONT(p_percentile) WITHIN GROUP (ORDER BY tte_ms)
    FROM scanner.scan_tte
    WHERE tenant_id = p_tenant_id
      AND started_at >= p_since
      AND NOT is_replay;
$$ LANGUAGE SQL STABLE;
|
||||
|
||||
-- TTE statistics aggregation view: hourly buckets per tenant, replays excluded.
-- Backs the C# TteStats record / IScanMetricsRepository.GetTteStatsAsync.
CREATE OR REPLACE VIEW scanner.tte_stats AS
SELECT
    tenant_id,
    date_trunc('hour', started_at) AS hour_bucket,

    COUNT(*) AS scan_count,

    -- TTE statistics (ms)
    AVG(tte_ms)::INT AS tte_avg_ms,
    PERCENTILE_CONT(0.50) WITHIN GROUP (ORDER BY tte_ms)::INT AS tte_p50_ms,
    PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY tte_ms)::INT AS tte_p95_ms,
    MAX(tte_ms) AS tte_max_ms,

    -- SLO compliance (P50 < 120s = 120000ms, P95 < 300s = 300000ms)
    -- NOTE(review): these are the fraction of ALL scans under each threshold,
    -- not a percentile comparison — confirm the column names match intent.
    ROUND(
        (COUNT(*) FILTER (WHERE tte_ms < 120000)::numeric / COUNT(*)) * 100, 2
    ) AS slo_p50_compliance_percent,
    ROUND(
        (COUNT(*) FILTER (WHERE tte_ms < 300000)::numeric / COUNT(*)) * 100, 2
    ) AS slo_p95_compliance_percent

FROM scanner.scan_tte
WHERE NOT is_replay
GROUP BY tenant_id, date_trunc('hour', started_at);

COMMENT ON VIEW scanner.tte_stats IS 'Hourly TTE statistics with SLO compliance';
|
||||
@@ -5,4 +5,5 @@ internal static class MigrationIds
|
||||
    // Embedded migration script file names, listed in apply order; each value
    // must match a SQL resource shipped with the storage assembly.
    public const string CreateTables = "001_create_tables.sql";
    public const string ProofSpineTables = "002_proof_spine_tables.sql";
    public const string ClassificationHistory = "003_classification_history.sql";
    public const string ScanMetrics = "004_scan_metrics.sql";
|
||||
}
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IScanMetricsRepository.cs
|
||||
// Sprint: SPRINT_3406_0001_0001_metrics_tables
|
||||
// Task: METRICS-3406-006
|
||||
// Description: Repository interface for scan metrics persistence
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Scanner.Storage.Models;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
/// <summary>
/// Repository for scan metrics persistence and retrieval.
/// Backed by the scanner.scan_metrics / scanner.execution_phases tables.
/// </summary>
public interface IScanMetricsRepository
{
    /// <summary>
    /// Save scan metrics after scan completion. Implementations upsert on
    /// <see cref="ScanMetrics.ScanId"/>, so re-saving the same scan updates timings.
    /// </summary>
    Task SaveAsync(ScanMetrics metrics, CancellationToken cancellationToken = default);

    /// <summary>
    /// Save execution phase details for a single phase.
    /// </summary>
    Task SavePhaseAsync(ExecutionPhase phase, CancellationToken cancellationToken = default);

    /// <summary>
    /// Save multiple execution phases; an empty list is a no-op.
    /// </summary>
    Task SavePhasesAsync(IReadOnlyList<ExecutionPhase> phases, CancellationToken cancellationToken = default);

    /// <summary>
    /// Get metrics by scan ID; null when no metrics exist for the scan.
    /// </summary>
    Task<ScanMetrics?> GetByScanIdAsync(Guid scanId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Get metrics by metrics ID; null when not found.
    /// </summary>
    Task<ScanMetrics?> GetByIdAsync(Guid metricsId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Get execution phases for a scan, ordered by phase order.
    /// </summary>
    Task<IReadOnlyList<ExecutionPhase>> GetPhasesAsync(Guid metricsId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Get hourly TTE statistics for a tenant within [since, until).
    /// </summary>
    Task<IReadOnlyList<TteStats>> GetTteStatsAsync(
        Guid tenantId,
        DateTimeOffset since,
        DateTimeOffset until,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Get a TTE percentile (milliseconds) for a tenant since a given time;
    /// <paramref name="percentile"/> is a fraction in [0, 1]. Null when no data.
    /// </summary>
    Task<int?> GetTtePercentileAsync(
        Guid tenantId,
        decimal percentile,
        DateTimeOffset since,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Get the most recent scans for a tenant (newest first), replays excluded
    /// unless <paramref name="includeReplays"/> is true.
    /// </summary>
    Task<IReadOnlyList<ScanMetrics>> GetRecentAsync(
        Guid tenantId,
        int limit = 100,
        bool includeReplays = false,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Get all scans of an artifact digest, newest first.
    /// </summary>
    Task<IReadOnlyList<ScanMetrics>> GetByArtifactAsync(
        string artifactDigest,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Delete metrics whose scan started before <paramref name="threshold"/>
    /// (for retention); returns the number of deleted rows.
    /// </summary>
    Task<int> DeleteOlderThanAsync(DateTimeOffset threshold, CancellationToken cancellationToken = default);
}
|
||||
@@ -0,0 +1,445 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PostgresScanMetricsRepository.cs
|
||||
// Sprint: SPRINT_3406_0001_0001_metrics_tables
|
||||
// Task: METRICS-3406-007
|
||||
// Description: PostgreSQL implementation of scan metrics repository
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Scanner.Storage.Models;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="IScanMetricsRepository"/>.
|
||||
/// </summary>
|
||||
public sealed class PostgresScanMetricsRepository : IScanMetricsRepository
|
||||
{
|
||||
private readonly NpgsqlDataSource _dataSource;
|
||||
private readonly ILogger<PostgresScanMetricsRepository> _logger;
|
||||
|
||||
/// <summary>
/// Creates the repository over a pooled Npgsql data source.
/// </summary>
/// <exception cref="ArgumentNullException">When either dependency is null.</exception>
public PostgresScanMetricsRepository(
    NpgsqlDataSource dataSource,
    ILogger<PostgresScanMetricsRepository> logger)
{
    ArgumentNullException.ThrowIfNull(dataSource);
    ArgumentNullException.ThrowIfNull(logger);

    _dataSource = dataSource;
    _logger = logger;
}
|
||||
|
||||
/// <inheritdoc/>
/// <remarks>
/// Upserts on scan_id: a second save for the same scan refreshes timings,
/// digests, and counts rather than failing on the UNIQUE constraint.
/// A Guid.Empty MetricsId is replaced with a fresh Guid before insert.
/// </remarks>
/// <exception cref="ArgumentNullException">When <paramref name="metrics"/> is null.</exception>
public async Task SaveAsync(ScanMetrics metrics, CancellationToken cancellationToken = default)
{
    // Fail fast with a clear parameter name instead of an NRE mid-binding.
    ArgumentNullException.ThrowIfNull(metrics);

    const string sql = """
        INSERT INTO scanner.scan_metrics (
            metrics_id, scan_id, tenant_id, surface_id,
            artifact_digest, artifact_type, replay_manifest_hash,
            findings_sha256, vex_bundle_sha256, proof_bundle_sha256, sbom_sha256,
            policy_digest, feed_snapshot_id,
            started_at, finished_at,
            t_ingest_ms, t_analyze_ms, t_reachability_ms, t_vex_ms, t_sign_ms, t_publish_ms,
            package_count, finding_count, vex_decision_count,
            scanner_version, scanner_image_digest, is_replay, created_at
        ) VALUES (
            @metricsId, @scanId, @tenantId, @surfaceId,
            @artifactDigest, @artifactType, @replayManifestHash,
            @findingsSha256, @vexBundleSha256, @proofBundleSha256, @sbomSha256,
            @policyDigest, @feedSnapshotId,
            @startedAt, @finishedAt,
            @tIngestMs, @tAnalyzeMs, @tReachabilityMs, @tVexMs, @tSignMs, @tPublishMs,
            @packageCount, @findingCount, @vexDecisionCount,
            @scannerVersion, @scannerImageDigest, @isReplay, @createdAt
        )
        ON CONFLICT (scan_id) DO UPDATE SET
            finished_at = EXCLUDED.finished_at,
            t_ingest_ms = EXCLUDED.t_ingest_ms,
            t_analyze_ms = EXCLUDED.t_analyze_ms,
            t_reachability_ms = EXCLUDED.t_reachability_ms,
            t_vex_ms = EXCLUDED.t_vex_ms,
            t_sign_ms = EXCLUDED.t_sign_ms,
            t_publish_ms = EXCLUDED.t_publish_ms,
            findings_sha256 = EXCLUDED.findings_sha256,
            package_count = EXCLUDED.package_count,
            finding_count = EXCLUDED.finding_count,
            vex_decision_count = EXCLUDED.vex_decision_count
        """;

    await using var cmd = _dataSource.CreateCommand(sql);

    // Let the caller omit MetricsId; the row still needs a stable PK.
    var metricsId = metrics.MetricsId == Guid.Empty ? Guid.NewGuid() : metrics.MetricsId;

    cmd.Parameters.AddWithValue("metricsId", metricsId);
    cmd.Parameters.AddWithValue("scanId", metrics.ScanId);
    cmd.Parameters.AddWithValue("tenantId", metrics.TenantId);
    cmd.Parameters.AddWithValue("surfaceId", (object?)metrics.SurfaceId ?? DBNull.Value);
    cmd.Parameters.AddWithValue("artifactDigest", metrics.ArtifactDigest);
    cmd.Parameters.AddWithValue("artifactType", metrics.ArtifactType);
    cmd.Parameters.AddWithValue("replayManifestHash", (object?)metrics.ReplayManifestHash ?? DBNull.Value);
    cmd.Parameters.AddWithValue("findingsSha256", metrics.FindingsSha256);
    cmd.Parameters.AddWithValue("vexBundleSha256", (object?)metrics.VexBundleSha256 ?? DBNull.Value);
    cmd.Parameters.AddWithValue("proofBundleSha256", (object?)metrics.ProofBundleSha256 ?? DBNull.Value);
    cmd.Parameters.AddWithValue("sbomSha256", (object?)metrics.SbomSha256 ?? DBNull.Value);
    cmd.Parameters.AddWithValue("policyDigest", (object?)metrics.PolicyDigest ?? DBNull.Value);
    cmd.Parameters.AddWithValue("feedSnapshotId", (object?)metrics.FeedSnapshotId ?? DBNull.Value);
    cmd.Parameters.AddWithValue("startedAt", metrics.StartedAt);
    cmd.Parameters.AddWithValue("finishedAt", metrics.FinishedAt);
    cmd.Parameters.AddWithValue("tIngestMs", metrics.Phases.IngestMs);
    cmd.Parameters.AddWithValue("tAnalyzeMs", metrics.Phases.AnalyzeMs);
    cmd.Parameters.AddWithValue("tReachabilityMs", metrics.Phases.ReachabilityMs);
    cmd.Parameters.AddWithValue("tVexMs", metrics.Phases.VexMs);
    cmd.Parameters.AddWithValue("tSignMs", metrics.Phases.SignMs);
    cmd.Parameters.AddWithValue("tPublishMs", metrics.Phases.PublishMs);
    cmd.Parameters.AddWithValue("packageCount", (object?)metrics.PackageCount ?? DBNull.Value);
    cmd.Parameters.AddWithValue("findingCount", (object?)metrics.FindingCount ?? DBNull.Value);
    cmd.Parameters.AddWithValue("vexDecisionCount", (object?)metrics.VexDecisionCount ?? DBNull.Value);
    cmd.Parameters.AddWithValue("scannerVersion", metrics.ScannerVersion);
    cmd.Parameters.AddWithValue("scannerImageDigest", (object?)metrics.ScannerImageDigest ?? DBNull.Value);
    cmd.Parameters.AddWithValue("isReplay", metrics.IsReplay);
    cmd.Parameters.AddWithValue("createdAt", metrics.CreatedAt);

    await cmd.ExecuteNonQueryAsync(cancellationToken);

    _logger.LogDebug("Saved scan metrics for scan {ScanId}", metrics.ScanId);
}
|
||||
|
||||
/// <inheritdoc/>
/// <remarks>Thin wrapper: delegates to <see cref="SavePhasesAsync"/> with a one-element list.</remarks>
public Task SavePhaseAsync(ExecutionPhase phase, CancellationToken cancellationToken = default)
    => SavePhasesAsync([phase], cancellationToken);
|
||||
|
||||
/// <inheritdoc/>
/// <remarks>
/// Inserts all phases inside a single transaction so a partial batch never
/// persists; an empty list is a no-op. PhaseMetrics is serialized to JSONB.
/// </remarks>
/// <exception cref="ArgumentNullException">When <paramref name="phases"/> is null.</exception>
public async Task SavePhasesAsync(IReadOnlyList<ExecutionPhase> phases, CancellationToken cancellationToken = default)
{
    // Fail fast instead of NRE on phases.Count.
    ArgumentNullException.ThrowIfNull(phases);

    if (phases.Count == 0) return;

    const string sql = """
        INSERT INTO scanner.execution_phases (
            metrics_id, phase_name, phase_order,
            started_at, finished_at, success,
            error_code, error_message, phase_metrics
        ) VALUES (
            @metricsId, @phaseName, @phaseOrder,
            @startedAt, @finishedAt, @success,
            @errorCode, @errorMessage, @phaseMetrics::jsonb
        )
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
    await using var transaction = await connection.BeginTransactionAsync(cancellationToken);

    try
    {
        foreach (var phase in phases)
        {
            await using var cmd = new NpgsqlCommand(sql, connection, transaction);

            cmd.Parameters.AddWithValue("metricsId", phase.MetricsId);
            cmd.Parameters.AddWithValue("phaseName", phase.PhaseName);
            cmd.Parameters.AddWithValue("phaseOrder", phase.PhaseOrder);
            cmd.Parameters.AddWithValue("startedAt", phase.StartedAt);
            cmd.Parameters.AddWithValue("finishedAt", phase.FinishedAt);
            cmd.Parameters.AddWithValue("success", phase.Success);
            cmd.Parameters.AddWithValue("errorCode", (object?)phase.ErrorCode ?? DBNull.Value);
            cmd.Parameters.AddWithValue("errorMessage", (object?)phase.ErrorMessage ?? DBNull.Value);
            // Null metrics map to SQL NULL rather than the JSON literal "null".
            cmd.Parameters.AddWithValue("phaseMetrics",
                phase.PhaseMetrics is not null
                    ? JsonSerializer.Serialize(phase.PhaseMetrics)
                    : DBNull.Value);

            await cmd.ExecuteNonQueryAsync(cancellationToken);
        }

        await transaction.CommitAsync(cancellationToken);
    }
    catch
    {
        // Roll back the whole batch on any failure, then surface the original error.
        await transaction.RollbackAsync(cancellationToken);
        throw;
    }
}
|
||||
|
||||
/// <inheritdoc/>
/// <returns>The metrics row for the scan, or null when none exists.</returns>
public async Task<ScanMetrics?> GetByScanIdAsync(Guid scanId, CancellationToken cancellationToken = default)
{
    const string sql = "SELECT * FROM scanner.scan_metrics WHERE scan_id = @scanId";

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("scanId", scanId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);

    // scan_id is UNIQUE, so at most one row can come back.
    return await reader.ReadAsync(cancellationToken)
        ? MapToScanMetrics(reader)
        : null;
}
|
||||
|
||||
/// <inheritdoc/>
/// <returns>The metrics row with the given primary key, or null when not found.</returns>
public async Task<ScanMetrics?> GetByIdAsync(Guid metricsId, CancellationToken cancellationToken = default)
{
    const string sql = "SELECT * FROM scanner.scan_metrics WHERE metrics_id = @metricsId";

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("metricsId", metricsId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);

    // Primary-key lookup: zero or one row.
    return await reader.ReadAsync(cancellationToken)
        ? MapToScanMetrics(reader)
        : null;
}
|
||||
|
||||
/// <inheritdoc/>
/// <returns>All phases for the metrics row, ordered by phase_order; empty when none.</returns>
public async Task<IReadOnlyList<ExecutionPhase>> GetPhasesAsync(Guid metricsId, CancellationToken cancellationToken = default)
{
    const string sql = """
        SELECT * FROM scanner.execution_phases
        WHERE metrics_id = @metricsId
        ORDER BY phase_order
        """;

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("metricsId", metricsId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);

    var result = new List<ExecutionPhase>();
    while (await reader.ReadAsync(cancellationToken))
    {
        result.Add(MapToExecutionPhase(reader));
    }

    return result;
}
|
||||
|
||||
/// <inheritdoc/>
/// <returns>Hourly buckets in [since, until), ordered chronologically; empty when no data.</returns>
public async Task<IReadOnlyList<TteStats>> GetTteStatsAsync(
    Guid tenantId,
    DateTimeOffset since,
    DateTimeOffset until,
    CancellationToken cancellationToken = default)
{
    // Reads the pre-aggregated scanner.tte_stats view (replays already excluded).
    const string sql = """
        SELECT * FROM scanner.tte_stats
        WHERE tenant_id = @tenantId
          AND hour_bucket >= @since
          AND hour_bucket < @until
        ORDER BY hour_bucket
        """;

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("tenantId", tenantId);
    command.Parameters.AddWithValue("since", since);
    command.Parameters.AddWithValue("until", until);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);

    var buckets = new List<TteStats>();
    while (await reader.ReadAsync(cancellationToken))
    {
        buckets.Add(MapToTteStats(reader));
    }

    return buckets;
}
|
||||
|
||||
/// <inheritdoc/>
/// <returns>The requested TTE percentile in milliseconds, or null when the
/// scanner.tte_percentile function returns SQL NULL (no matching scans).</returns>
public async Task<int?> GetTtePercentileAsync(
    Guid tenantId,
    decimal percentile,
    CancellationToken cancellationToken = default)
{
    const string sql = "SELECT scanner.tte_percentile(@tenantId, @percentile, @since)";

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("tenantId", tenantId);
    command.Parameters.AddWithValue("percentile", percentile);
    command.Parameters.AddWithValue("since", since);

    var scalar = await command.ExecuteScalarAsync(cancellationToken);
    if (scalar is null or DBNull)
    {
        return null;
    }

    return Convert.ToInt32(scalar);
}
|
||||
|
||||
/// <inheritdoc/>
/// <remarks>
/// Newest scans first; replays are filtered out unless
/// <paramref name="includeReplays"/> is true.
/// </remarks>
public async Task<IReadOnlyList<ScanMetrics>> GetRecentAsync(
    Guid tenantId,
    int limit = 100,
    bool includeReplays = false,
    CancellationToken cancellationToken = default)
{
    // Fix: the replay filter used to be spliced in via string interpolation,
    // producing two distinct SQL texts. Folding the flag into a parameter
    // keeps one static, cacheable statement with identical semantics:
    // when @includeReplays is TRUE the predicate is always satisfied.
    const string sql = """
        SELECT * FROM scanner.scan_metrics
        WHERE tenant_id = @tenantId
          AND (@includeReplays OR NOT is_replay)
        ORDER BY started_at DESC
        LIMIT @limit
        """;

    await using var cmd = _dataSource.CreateCommand(sql);
    cmd.Parameters.AddWithValue("tenantId", tenantId);
    cmd.Parameters.AddWithValue("includeReplays", includeReplays);
    cmd.Parameters.AddWithValue("limit", limit);

    var metrics = new List<ScanMetrics>();
    await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);

    while (await reader.ReadAsync(cancellationToken))
    {
        metrics.Add(MapToScanMetrics(reader));
    }

    return metrics;
}
|
||||
|
||||
/// <inheritdoc/>
/// <returns>All scans of the artifact, newest first; empty when none.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="artifactDigest"/> is null.</exception>
/// <exception cref="ArgumentException">When <paramref name="artifactDigest"/> is empty or whitespace.</exception>
public async Task<IReadOnlyList<ScanMetrics>> GetByArtifactAsync(
    string artifactDigest,
    CancellationToken cancellationToken = default)
{
    // Fail fast: a null/blank digest can never match and previously surfaced
    // as an opaque provider error at parameter-binding time.
    ArgumentException.ThrowIfNullOrWhiteSpace(artifactDigest);

    const string sql = """
        SELECT * FROM scanner.scan_metrics
        WHERE artifact_digest = @artifactDigest
        ORDER BY started_at DESC
        """;

    await using var cmd = _dataSource.CreateCommand(sql);
    cmd.Parameters.AddWithValue("artifactDigest", artifactDigest);

    var metrics = new List<ScanMetrics>();
    await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);

    while (await reader.ReadAsync(cancellationToken))
    {
        metrics.Add(MapToScanMetrics(reader));
    }

    return metrics;
}
|
||||
|
||||
/// <inheritdoc/>
public async Task<int> DeleteOlderThanAsync(DateTimeOffset threshold, CancellationToken cancellationToken = default)
{
    // Retention sweep: remove every metrics row whose scan started before the
    // cutoff. Returns the number of rows deleted.
    const string sql = """
        DELETE FROM scanner.scan_metrics WHERE started_at < @threshold
        """;

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("threshold", threshold);

    return await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Materializes a <see cref="ScanMetrics"/> record from the current row of
/// <paramref name="reader"/>. Column names must match the
/// scanner.scan_metrics table layout used by the queries in this repository.
/// </summary>
/// <param name="reader">Reader positioned on a scan_metrics row.</param>
/// <returns>The populated <see cref="ScanMetrics"/> record.</returns>
private static ScanMetrics MapToScanMetrics(NpgsqlDataReader reader)
{
    // The previous version resolved each nullable column's ordinal twice
    // (once for IsDBNull, once for the typed getter) and repeated that
    // ten-line pattern for every optional field. These local helpers resolve
    // each ordinal exactly once and collapse the repetition.
    string? NullableString(string column)
    {
        var ordinal = reader.GetOrdinal(column);
        return reader.IsDBNull(ordinal) ? null : reader.GetString(ordinal);
    }

    int? NullableInt32(string column)
    {
        var ordinal = reader.GetOrdinal(column);
        return reader.IsDBNull(ordinal) ? null : reader.GetInt32(ordinal);
    }

    Guid? NullableGuid(string column)
    {
        var ordinal = reader.GetOrdinal(column);
        return reader.IsDBNull(ordinal) ? null : reader.GetGuid(ordinal);
    }

    return new ScanMetrics
    {
        MetricsId = reader.GetGuid(reader.GetOrdinal("metrics_id")),
        ScanId = reader.GetGuid(reader.GetOrdinal("scan_id")),
        TenantId = reader.GetGuid(reader.GetOrdinal("tenant_id")),
        SurfaceId = NullableGuid("surface_id"),
        ArtifactDigest = reader.GetString(reader.GetOrdinal("artifact_digest")),
        ArtifactType = reader.GetString(reader.GetOrdinal("artifact_type")),
        ReplayManifestHash = NullableString("replay_manifest_hash"),
        FindingsSha256 = reader.GetString(reader.GetOrdinal("findings_sha256")),
        VexBundleSha256 = NullableString("vex_bundle_sha256"),
        ProofBundleSha256 = NullableString("proof_bundle_sha256"),
        SbomSha256 = NullableString("sbom_sha256"),
        PolicyDigest = NullableString("policy_digest"),
        FeedSnapshotId = NullableString("feed_snapshot_id"),
        StartedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("started_at")),
        FinishedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("finished_at")),
        // Per-phase durations are stored as flat millisecond columns.
        Phases = new ScanPhaseTimings
        {
            IngestMs = reader.GetInt32(reader.GetOrdinal("t_ingest_ms")),
            AnalyzeMs = reader.GetInt32(reader.GetOrdinal("t_analyze_ms")),
            ReachabilityMs = reader.GetInt32(reader.GetOrdinal("t_reachability_ms")),
            VexMs = reader.GetInt32(reader.GetOrdinal("t_vex_ms")),
            SignMs = reader.GetInt32(reader.GetOrdinal("t_sign_ms")),
            PublishMs = reader.GetInt32(reader.GetOrdinal("t_publish_ms"))
        },
        PackageCount = NullableInt32("package_count"),
        FindingCount = NullableInt32("finding_count"),
        VexDecisionCount = NullableInt32("vex_decision_count"),
        ScannerVersion = reader.GetString(reader.GetOrdinal("scanner_version")),
        ScannerImageDigest = NullableString("scanner_image_digest"),
        IsReplay = reader.GetBoolean(reader.GetOrdinal("is_replay")),
        CreatedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("created_at"))
    };
}
|
||||
|
||||
/// <summary>
/// Materializes an <see cref="ExecutionPhase"/> from the current row of
/// <paramref name="reader"/> (execution-phase table layout).
/// </summary>
/// <param name="reader">Reader positioned on an execution-phase row.</param>
/// <returns>The populated <see cref="ExecutionPhase"/> record.</returns>
private static ExecutionPhase MapToExecutionPhase(NpgsqlDataReader reader)
{
    // Resolve the ordinal once per optional text column.
    string? ReadOptionalString(string column)
    {
        var ordinal = reader.GetOrdinal(column);
        return reader.IsDBNull(ordinal) ? null : reader.GetString(ordinal);
    }

    var rawPhaseMetrics = ReadOptionalString("phase_metrics");

    return new ExecutionPhase
    {
        Id = reader.GetInt64(reader.GetOrdinal("id")),
        MetricsId = reader.GetGuid(reader.GetOrdinal("metrics_id")),
        PhaseName = reader.GetString(reader.GetOrdinal("phase_name")),
        PhaseOrder = reader.GetInt32(reader.GetOrdinal("phase_order")),
        StartedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("started_at")),
        FinishedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("finished_at")),
        Success = reader.GetBoolean(reader.GetOrdinal("success")),
        ErrorCode = ReadOptionalString("error_code"),
        ErrorMessage = ReadOptionalString("error_message"),
        // NOTE(review): deserializing JSON into Dictionary<string, object>
        // leaves the values as JsonElement instances — confirm downstream
        // consumers of PhaseMetrics expect that.
        PhaseMetrics = rawPhaseMetrics is null
            ? null
            : JsonSerializer.Deserialize<Dictionary<string, object>>(rawPhaseMetrics)
    };
}
|
||||
|
||||
/// <summary>
/// Materializes a <see cref="TteStats"/> aggregate (hourly TTE rollup row)
/// from the current position of <paramref name="reader"/>.
/// </summary>
/// <param name="reader">Reader positioned on a TTE statistics row.</param>
/// <returns>The populated <see cref="TteStats"/> record.</returns>
private static TteStats MapToTteStats(NpgsqlDataReader reader)
{
    // Every column here is non-nullable; a tiny helper keeps the ordinal
    // lookups from cluttering the initializer.
    int Ord(string column) => reader.GetOrdinal(column);

    return new TteStats
    {
        TenantId = reader.GetGuid(Ord("tenant_id")),
        HourBucket = reader.GetFieldValue<DateTimeOffset>(Ord("hour_bucket")),
        ScanCount = reader.GetInt32(Ord("scan_count")),
        TteAvgMs = reader.GetInt32(Ord("tte_avg_ms")),
        TteP50Ms = reader.GetInt32(Ord("tte_p50_ms")),
        TteP95Ms = reader.GetInt32(Ord("tte_p95_ms")),
        TteMaxMs = reader.GetInt32(Ord("tte_max_ms")),
        SloP50CompliancePercent = reader.GetDecimal(Ord("slo_p50_compliance_percent")),
        SloP95CompliancePercent = reader.GetDecimal(Ord("slo_p95_compliance_percent"))
    };
}
|
||||
}
|
||||