Refactor code structure and optimize performance across multiple modules
@@ -86,10 +86,11 @@ services:
      STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
      STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority/plugins"
    volumes:
      - ../../etc/authority.yaml:/etc/authority.yaml:ro
      - ../../etc/authority.plugins:/app/etc/authority.plugins:ro
      # Configuration (consolidated under etc/)
      - ../../etc/authority:/app/etc/authority:ro
      - ../../etc/certificates/trust-roots:/etc/ssl/certs/stellaops:ro
    ports:
      - "${AUTHORITY_PORT:-8440}:8440"
    networks:
@@ -134,14 +135,14 @@ services:
      - postgres
      - authority
    environment:
      ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
      ISSUERDIRECTORY__CONFIG: "/app/etc/issuer-directory/issuer-directory.yaml"
      ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
      ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
      ISSUERDIRECTORY__STORAGE__DRIVER: "postgres"
      ISSUERDIRECTORY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
    volumes:
      - ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
      - ../../etc/issuer-directory:/app/etc/issuer-directory:ro
    ports:
      - "${ISSUER_DIRECTORY_PORT:-8447}:8080"
    networks:
@@ -195,7 +196,11 @@ services:
      SCANNER__OFFLINEKIT__TRUSTROOTDIRECTORY: "${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}"
      SCANNER__OFFLINEKIT__REKORSNAPSHOTDIRECTORY: "${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}"
    volumes:
      - ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-./offline/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro
      # Configuration (consolidated under etc/)
      - ../../etc/scanner:/app/etc/scanner:ro
      - ../../etc/certificates/trust-roots:/etc/ssl/certs/stellaops:ro
      # Offline kit paths (for air-gap mode)
      - ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-../../etc/certificates/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro
      - ${SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH:-./offline/rekor-snapshot}:${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}:ro
    ports:
      - "${SCANNER_WEB_PORT:-8444}:8444"
@@ -256,7 +261,7 @@ services:
      NOTIFY__QUEUE__DRIVER: "nats"
      NOTIFY__QUEUE__NATS__URL: "nats://nats:4222"
    volumes:
      - ../../etc/notify.dev.yaml:/app/etc/notify.yaml:ro
      - ../../etc/notify:/app/etc/notify:ro
    ports:
      - "${NOTIFY_WEB_PORT:-8446}:8446"
    networks:
@@ -293,6 +298,9 @@ services:
    ports:
      - "${ADVISORY_AI_WEB_PORT:-8448}:8448"
    volumes:
      # Configuration (consolidated under etc/)
      - ../../etc/llm-providers:/app/etc/llm-providers:ro
      # Runtime data
      - advisory-ai-queue:/var/lib/advisory-ai/queue
      - advisory-ai-plans:/var/lib/advisory-ai/plans
      - advisory-ai-outputs:/var/lib/advisory-ai/outputs
@@ -314,6 +322,9 @@ services:
      ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
    volumes:
      # Configuration (consolidated under etc/)
      - ../../etc/llm-providers:/app/etc/llm-providers:ro
      # Runtime data
      - advisory-ai-queue:/var/lib/advisory-ai/queue
      - advisory-ai-plans:/var/lib/advisory-ai/plans
      - advisory-ai-outputs:/var/lib/advisory-ai/outputs
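The merged result of these overrides can be checked without starting anything; `docker compose config` interpolates the `${VAR:-default}` expressions and fails on malformed YAML (the compose file path below is an assumption, point it at the file this hunk belongs to):

```bash
# Render and validate the merged configuration for a given profile
docker compose -f devops/compose/docker-compose.yml --profile core config

# Exit-code-only validation, convenient in CI
docker compose -f devops/compose/docker-compose.yml --profile full config --quiet
```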
@@ -22,7 +22,6 @@ ENV TZ=UTC
# Disable .NET telemetry
ENV DOTNET_NOLOGO=1
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1

# .NET paths
ENV DOTNET_ROOT=/usr/share/dotnet
@@ -43,18 +42,30 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
    jq \
    # Build tools
    build-essential \
    # Docker CLI (for DinD scenarios)
    docker.io \
    docker-compose-plugin \
    # Cross-compilation
    binutils-aarch64-linux-gnu \
    # Python (for scripts)
    python3 \
    python3-pip \
    # .NET dependencies
    libicu70 \
    # Locales
    locales \
    && rm -rf /var/lib/apt/lists/*

# ===========================================================================
# DOCKER CLI & COMPOSE (from official Docker repo)
# ===========================================================================

RUN install -m 0755 -d /etc/apt/keyrings \
    && curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc \
    && chmod a+r /etc/apt/keyrings/docker.asc \
    && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu jammy stable" > /etc/apt/sources.list.d/docker.list \
    && apt-get update \
    && apt-get install -y --no-install-recommends docker-ce-cli docker-compose-plugin \
    && rm -rf /var/lib/apt/lists/* \
    && docker --version

# Set locale
RUN locale-gen en_US.UTF-8
ENV LANG=en_US.UTF-8
@@ -132,19 +143,20 @@ RUN useradd -m -s /bin/bash ciuser \
    && chown -R ciuser:ciuser /home/ciuser

# Health check script
COPY --chmod=755 <<'EOF' /usr/local/bin/ci-health-check
#!/bin/bash
set -e
echo "=== CI Environment Health Check ==="
echo "OS: $(cat /etc/os-release | grep PRETTY_NAME | cut -d= -f2)"
echo ".NET: $(dotnet --version)"
echo "Node: $(node --version)"
echo "npm: $(npm --version)"
echo "Helm: $(helm version --short)"
echo "Cosign: $(cosign version 2>&1 | head -1)"
echo "Docker: $(docker --version 2>/dev/null || echo 'Not available')"
echo "PostgreSQL client: $(psql --version)"
echo "=== All checks passed ==="
EOF
RUN printf '%s\n' \
    '#!/bin/bash' \
    'set -e' \
    'echo "=== CI Environment Health Check ==="' \
    'echo "OS: $(cat /etc/os-release | grep PRETTY_NAME | cut -d= -f2)"' \
    'echo ".NET: $(dotnet --version)"' \
    'echo "Node: $(node --version)"' \
    'echo "npm: $(npm --version)"' \
    'echo "Helm: $(helm version --short)"' \
    'echo "Cosign: $(cosign version 2>&1 | head -1)"' \
    'echo "Docker: $(docker --version 2>/dev/null || echo Not available)"' \
    'echo "PostgreSQL client: $(psql --version)"' \
    'echo "=== All checks passed ==="' \
    > /usr/local/bin/ci-health-check \
    && chmod +x /usr/local/bin/ci-health-check

ENTRYPOINT ["/bin/bash"]
devops/docs/README.md (Normal file, 166 lines)
@@ -0,0 +1,166 @@
# DevOps Infrastructure

This directory contains operational tooling, deployment configurations, and CI/CD support for StellaOps.

## Directory Structure

```
devops/
├── ansible/          # Ansible playbooks for deployment automation
├── compose/          # Docker Compose configurations
├── database/         # Database schemas and migrations
│   ├── mongo/        # MongoDB (deprecated)
│   └── postgres/     # PostgreSQL schemas
├── docker/           # Dockerfiles and container build scripts
│   ├── Dockerfile.ci # CI runner environment
│   └── base/         # Base images
├── docs/             # This documentation
├── gitlab/           # GitLab CI templates (legacy)
├── helm/             # Helm charts for Kubernetes deployment
├── logging/          # Logging configuration templates
│   ├── serilog.json.template # Serilog config for .NET services
│   ├── filebeat.yml          # Filebeat for log shipping
│   └── logrotate.conf        # Log rotation configuration
├── observability/    # Monitoring, metrics, and tracing
├── offline/          # Air-gap deployment support
│   ├── airgap/       # Air-gap bundle scripts
│   └── kit/          # Offline installation kit
├── releases/         # Release artifacts and manifests
├── scripts/          # Operational scripts
├── services/         # Per-service operational configs
├── telemetry/        # OpenTelemetry and metrics configs
└── tools/            # DevOps tooling
```

## Quick Start

### Local CI Environment

Build and run the CI Docker environment locally:

```bash
# Build the CI image
docker build -f devops/docker/Dockerfile.ci -t stellaops-ci:local .

# Run tests in CI environment
docker run --rm -v $(pwd):/workspace stellaops-ci:local \
  dotnet test --filter "Category=Unit"
```
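The image also carries the `ci-health-check` script installed by `devops/docker/Dockerfile.ci`; running it is a quick smoke test that the toolchain inside the image is complete:

```bash
# Prints .NET, Node, Helm, Cosign, Docker and psql versions; exits non-zero if any is missing
docker run --rm stellaops-ci:local /usr/local/bin/ci-health-check
```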

### Local Testing

```bash
# Run all PR-gating tests
./devops/scripts/test-local.sh

# Validate compose configurations
./devops/scripts/validate-compose.sh

# Validate Helm charts
./.gitea/scripts/validate/validate-helm.sh
```

### Logging Configuration

The `logging/` directory contains templates for centralized logging:

1. **Serilog** (`serilog.json.template`) - Structured logging for .NET services
   - Console and file sinks
   - Rolling files with 14-day retention
   - 100MB file size limit with roll-over
   - Environment-variable templating

2. **Filebeat** (`filebeat.yml`) - Log shipping to Elasticsearch/Logstash
   - JSON log parsing from Serilog output
   - Container log support
   - Kubernetes metadata enrichment
   - Air-gap fallback to file output

3. **Logrotate** (`logrotate.conf`) - System-level log rotation
   - Daily rotation with 14-day retention
   - Compression with delay
   - Service-specific overrides for high-volume services

To use:

```bash
# Copy template and customize
cp devops/logging/serilog.json.template /etc/stellaops/serilog.json

# Set service name
export STELLAOPS_SERVICE_NAME=scanner

# Install filebeat config (requires root)
sudo cp devops/logging/filebeat.yml /etc/filebeat/filebeat.yml

# Install logrotate config (requires root)
sudo cp devops/logging/logrotate.conf /etc/logrotate.d/stellaops
```
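After filling in the template values, a quick sanity check catches malformed JSON and missing log directories before the service starts (the `stellaops` user and group mirror the ownership used in `logrotate.conf`):

```bash
# Validate the rendered config and pre-create the per-service log directory
jq empty /etc/stellaops/serilog.json && echo "serilog.json parses"
sudo install -d -o stellaops -g stellaops -m 0750 /var/log/stellaops/scanner
```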

## Compose Profiles

The `compose/` directory contains Docker Compose configurations with profiles:

| Profile | Description |
|---------|-------------|
| `core` | Essential services (PostgreSQL, Router, Authority) |
| `scanner` | Vulnerability scanning services |
| `full` | All services for complete deployment |
| `dev` | Development profile with hot-reload |
| `test` | Testing profile with test containers |

```bash
# Start core services
docker compose --profile core up -d

# Start full stack
docker compose --profile full up -d
```

## Helm Charts

The `helm/` directory contains Helm charts for Kubernetes:

```bash
# Lint charts
helm lint devops/helm/stellaops

# Template with values
helm template stellaops devops/helm/stellaops -f values.yaml

# Install
helm install stellaops devops/helm/stellaops -n stellaops --create-namespace
```
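For a stricter check than `helm lint`, the rendered manifests can be validated against a cluster without applying them (assumes the current kubectl context points at a reachable cluster; `--dry-run=client` works offline but only checks schema):

```bash
# Render the chart and let the API server validate every manifest
helm template stellaops devops/helm/stellaops -f values.yaml \
  | kubectl apply --dry-run=server -f -
```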

## Release Process

See [RELEASE_PROCESS.md](../../docs/releases/RELEASE_PROCESS.md) for the complete release workflow.

Quick release commands:

```bash
# Dry-run release build
python devops/release/build_release.py --version 2026.04.0 --dry-run

# Verify release artifacts
python devops/release/verify_release.py --release-dir out/release
```

## Air-Gap / Offline Deployment

The `offline/` directory contains tools for air-gapped environments:

```bash
# Create offline bundle
./devops/offline/airgap/create-bundle.sh --version 2026.04

# Import on air-gapped system
./devops/offline/kit/import-bundle.sh stellaops-2026.04-bundle.tar.gz
```
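If the bundle ships with a checksum sidecar (the `.sha256` file name below is an assumption), verify it on the air-gapped host before importing:

```bash
sha256sum -c stellaops-2026.04-bundle.tar.gz.sha256 \
  && ./devops/offline/kit/import-bundle.sh stellaops-2026.04-bundle.tar.gz
```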

## Related Documentation

- [Release Engineering Playbook](../../docs/13_RELEASE_ENGINEERING_PLAYBOOK.md)
- [Versioning Strategy](../../docs/releases/VERSIONING.md)
- [Offline Kit Guide](../../docs/24_OFFLINE_KIT.md)
- [CI/CD Workflows](../../.gitea/workflows/README.md)
devops/logging/filebeat.yml (Normal file, 97 lines)
@@ -0,0 +1,97 @@
# StellaOps Filebeat Configuration
# Ships logs to Elasticsearch/Logstash for centralized logging

filebeat.inputs:
  # Application logs (JSON format from Serilog)
  - type: log
    enabled: true
    paths:
      - /var/log/stellaops/*/*.log
    json.keys_under_root: true
    json.add_error_key: true
    json.message_key: message
    json.overwrite_keys: true
    fields:
      log_type: application
    fields_under_root: true
    multiline:
      type: pattern
      pattern: '^\[?[0-9]{4}-[0-9]{2}-[0-9]{2}'
      negate: true
      match: after

  # Container logs (stdout/stderr)
  - type: container
    enabled: true
    paths:
      - /var/lib/docker/containers/*/*.log
    processors:
      - add_kubernetes_metadata:
          host: ${NODE_NAME}
          matchers:
            - logs_path:
                logs_path: "/var/lib/docker/containers/"

# Processors for all inputs
processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - decode_json_fields:
      fields: ["message"]
      target: ""
      overwrite_keys: true
      when:
        has_fields: ["message"]
  - drop_fields:
      fields: ["agent.ephemeral_id", "agent.id", "agent.name"]
      ignore_missing: true

# Output configuration
output.elasticsearch:
  enabled: ${FILEBEAT_ELASTICSEARCH_ENABLED:false}
  hosts: ["${ELASTICSEARCH_HOST:localhost}:${ELASTICSEARCH_PORT:9200}"]
  protocol: "${ELASTICSEARCH_PROTOCOL:http}"
  username: "${ELASTICSEARCH_USERNAME:}"
  password: "${ELASTICSEARCH_PASSWORD:}"
  index: "stellaops-%{[fields.log_type]}-%{+yyyy.MM.dd}"
  ssl:
    enabled: ${ELASTICSEARCH_SSL_ENABLED:false}
    verification_mode: "${ELASTICSEARCH_SSL_VERIFICATION:full}"

output.logstash:
  enabled: ${FILEBEAT_LOGSTASH_ENABLED:false}
  hosts: ["${LOGSTASH_HOST:localhost}:${LOGSTASH_PORT:5044}"]
  ssl:
    enabled: ${LOGSTASH_SSL_ENABLED:false}

# Fallback to file output (useful for air-gapped environments)
output.file:
  enabled: ${FILEBEAT_FILE_ENABLED:false}
  path: "/var/log/filebeat"
  filename: stellaops-filebeat
  rotate_every_kb: 10240
  number_of_files: 7

# Logging
logging.level: info
logging.to_files: true
logging.files:
  path: /var/log/filebeat
  name: filebeat
  keepfiles: 7
  permissions: 0640

# Index Lifecycle Management
setup.ilm:
  enabled: true
  rollover_alias: "stellaops"
  pattern: "{now/d}-000001"
  policy_name: "stellaops-ilm-policy"

# Kibana dashboards
setup.kibana:
  enabled: ${KIBANA_ENABLED:false}
  host: "${KIBANA_HOST:localhost}:${KIBANA_PORT:5601}"
  protocol: "${KIBANA_PROTOCOL:http}"
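A sketch of how to exercise this config before shipping it, using Filebeat's built-in checks (the Elasticsearch host below is a placeholder):

```bash
# Syntax/structure check of the config file
filebeat test config -c devops/logging/filebeat.yml

# Connectivity check against the enabled output
FILEBEAT_ELASTICSEARCH_ENABLED=true ELASTICSEARCH_HOST=es.example.internal \
  filebeat test output -c devops/logging/filebeat.yml
```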
devops/logging/logrotate.conf (Normal file, 83 lines)
@@ -0,0 +1,83 @@
# StellaOps Logrotate Configuration
# Place in /etc/logrotate.d/stellaops

/var/log/stellaops/*/*.log {
    daily
    rotate 14
    compress
    delaycompress
    missingok
    notifempty
    create 0640 stellaops stellaops
    sharedscripts
    dateext
    dateformat -%Y%m%d

    # Size-based rotation (rotate if larger than 100MB regardless of time)
    maxsize 100M

    # Minimum size before considering rotation
    minsize 1M

    postrotate
        # Signal services to reopen log files if needed
        # Most Serilog file sinks handle this automatically
        /bin/true
    endscript
}

# Scanner service specific (higher volume)
/var/log/stellaops/scanner/*.log {
    daily
    rotate 7
    compress
    delaycompress
    missingok
    notifempty
    create 0640 stellaops stellaops
    sharedscripts
    dateext
    maxsize 200M
}

# Concelier service (vulnerability processing)
/var/log/stellaops/concelier/*.log {
    daily
    rotate 14
    compress
    delaycompress
    missingok
    notifempty
    create 0640 stellaops stellaops
    sharedscripts
    dateext
    maxsize 150M
}

# Authority service (signing operations - keep longer for audit)
/var/log/stellaops/authority/*.log {
    daily
    rotate 30
    compress
    delaycompress
    missingok
    notifempty
    create 0640 stellaops stellaops
    sharedscripts
    dateext
    maxsize 50M
}

# Router/Gateway logs
/var/log/stellaops/router/*.log {
    daily
    rotate 7
    compress
    delaycompress
    missingok
    notifempty
    create 0640 stellaops stellaops
    sharedscripts
    dateext
    maxsize 100M
}
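These stanzas can be exercised without waiting for the daily cron run:

```bash
# Debug mode: show what would be rotated, change nothing
sudo logrotate -d /etc/logrotate.d/stellaops

# Force one rotation to confirm create/permissions/postrotate behaviour
sudo logrotate -f /etc/logrotate.d/stellaops
```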
devops/logging/serilog.json.template (Normal file, 62 lines)
@@ -0,0 +1,62 @@
{
  "Serilog": {
    "Using": [
      "Serilog.Sinks.Console",
      "Serilog.Sinks.File",
      "Serilog.Enrichers.Thread",
      "Serilog.Enrichers.Environment"
    ],
    "MinimumLevel": {
      "Default": "Information",
      "Override": {
        "Microsoft": "Warning",
        "Microsoft.AspNetCore": "Warning",
        "Microsoft.EntityFrameworkCore": "Warning",
        "System": "Warning",
        "System.Net.Http": "Warning",
        "Grpc": "Warning"
      }
    },
    "WriteTo": [
      {
        "Name": "Console",
        "Args": {
          "outputTemplate": "[{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} {Level:u3}] [{SourceContext}] {Message:lj}{NewLine}{Exception}",
          "theme": "Serilog.Sinks.SystemConsole.Themes.AnsiConsoleTheme::Code, Serilog.Sinks.Console"
        }
      },
      {
        "Name": "File",
        "Args": {
          "path": "/var/log/stellaops/${STELLAOPS_SERVICE_NAME:-.}/stellaops-.log",
          "rollingInterval": "Day",
          "retainedFileCountLimit": 14,
          "outputTemplate": "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] [{SourceContext}] [{TraceId}] {Message:lj}{NewLine}{Exception}",
          "fileSizeLimitBytes": 104857600,
          "rollOnFileSizeLimit": true
        }
      }
    ],
    "Enrich": [
      "FromLogContext",
      "WithMachineName",
      "WithThreadId",
      "WithEnvironmentName",
      "WithProcessId"
    ],
    "Properties": {
      "Application": "StellaOps",
      "ServiceName": "${STELLAOPS_SERVICE_NAME:-Unknown}",
      "Environment": "${ASPNETCORE_ENVIRONMENT:-Production}"
    },
    "Filter": [
      {
        "Name": "ByExcluding",
        "Args": {
          "expression": "Contains(@Message, 'Executing endpoint')"
        }
      }
    ]
  },
  "AllowedHosts": "*"
}
devops/manifests/binary-plugins.manifest.json (Normal file, 114 lines)
@@ -0,0 +1,114 @@
{
  "generated_utc": "2025-11-18T21:41:23.225667Z",
  "summary": "Pinned binaries (non-NuGet) tracked for integrity; relocate new artefacts here or under offline/feeds.",
  "entries": [
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests.dll", "sha256": "347e600c14671db7015aa3d08b449a7e7bbd9dcfb3b1d4e31cd5a44d2af7b4c7", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Deno/StellaOps.Scanner.Analyzers.Lang.Deno.dll", "sha256": "6fb59d1497c6c222df883405177ee7a03e967570671b4a4e39c1ca41df5ee507", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.dll", "sha256": "aceea5db1340463db2038cecb528357532d3d5d0102fc9ce0f13d1f0888f0621", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.dll", "sha256": "87a0308b4e25f29137d2722bf091628d1753a02414e474f6958c01353d78a95f", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Java.Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests.dll", "sha256": "64279fba6e3dcd6e34290565f3d324ad306bc9e971b2fa191eeafbd70868411b", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.dll", "sha256": "fb2201b2d1ae60c31d2f2390f37b5a574368952e952f05c41989cbec96746dc5", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Node.Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests.dll", "sha256": "95f11346a72b28297c307d71c226b2d7f2dc7b465a85b6ca99e6fc739ff92c73", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.dll", "sha256": "45d59201b3d52fcb022035b00afca0c27f62993d727f5dbfc3ec120e1f3090ba", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.dll", "sha256": "e4ccaed15c551f859dbee367849c8c99ca5554a5c10926988c9fe2afe0af07ea", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Ruby.Tests/StellaOps.Scanner.Analyzers.Lang.Ruby.Tests.dll", "sha256": "a0b641a18ff55056e16c5f15b3124a7fcfa8f99e2e16166b68df9372a79c37b2", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Ruby/StellaOps.Scanner.Analyzers.Lang.Ruby.dll", "sha256": "20624ef44aa797339e73e448dbc82e28e9adfac5262ba4b6c9fddb4e1ed89cbc", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.dll", "sha256": "a0df5ffdbb043354adef3b3b1203e151b64a4f1c34e560d2bd182188e5535538", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.dll", "sha256": "af19afd814ede740b547514073640a1ce7cd55d346335761d5393d31b0f64224", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.dll", "sha256": "819e7fa3d30d37d972c630c96828ad121bbef184ca977bc2245f9e9ec9815cc8", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.dll", "sha256": "760b531182a497e76c1fa987d6bd834aa4b369f815542fa6b8e10452dc7048ff", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.dll", "sha256": "8cc75f09efa8c656106ed96ad5ab08a0c388aa4beb56aadf6b07bf6d76c00085", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.dll", "sha256": "987593dd273f398f07f38b349eaedd6338c5615e976dad1633323348f7b3e9ac", "type": "binary", "owner": "plugins" },
    { "path": "plugins/scanner/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.dll", "sha256": "4266013acbf3a0d0a02e2682c7e32335c2c3f9263e71b917bac34dac4f70d476", "type": "binary", "owner": "plugins" }
  ]
}
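Since every entry pins a SHA-256, the whole manifest can be re-checked in one pass from the repository root (the paths in the manifest are repo-relative):

```bash
# Recompute each digest and compare against the pinned value
jq -r '.entries[] | "\(.sha256)  \(.path)"' devops/manifests/binary-plugins.manifest.json \
  | sha256sum -c -
```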
devops/offline/feeds/manifest.json (Normal file, 12 lines)
@@ -0,0 +1,12 @@
{
  "generated_utc": "2025-11-18T21:41:23.244597Z",
  "summary": "Offline feed bundles registered here. Add entries when baking air-gap bundles.",
  "feeds": [
    {
      "name": "telemetry-offline-bundle",
      "path": "offline/feeds/telemetry-offline-bundle.tar.gz",
      "sha256": "49d3ac3502bad1caaed4c1f7bceaa4ce40fdfce6210d4ae20c90386aeb84ca4e",
      "description": "Telemetry offline bundle (migrated from out/telemetry)"
    }
  ]
}
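The same pattern verifies the registered feed bundles; the paths appear to be relative to `devops/`, which is an assumption based on this layout:

```bash
cd devops && jq -r '.feeds[] | "\(.sha256)  \(.path)"' offline/feeds/manifest.json | sha256sum -c -
```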
devops/offline/feeds/telemetry-offline-bundle.tar.gz (Normal file, binary; not shown)
@@ -0,0 +1 @@
49d3ac3502bad1caaed4c1f7bceaa4ce40fdfce6210d4ae20c90386aeb84ca4e  telemetry-offline-bundle.tar.gz
@@ -0,0 +1,4 @@
bb1da224c09031996224154611f2e1c2143c23b96ab583191766f7d281b20800  hashes.sha256
421af53f9eeba6903098d292fbd56f98be62ea6130b5161859889bf11d699d18  sample-sbom-context.json
e5aecfba5cee8d412408fb449f12fa4d5bf0a7cb7e5b316b99da3b9019897186  sample-vuln-output.ndjson
736efd36508de7b72c9cbddf851335d9534c326af1670be7d101cbb91634357d  sbom-context-response.json
@@ -0,0 +1,2 @@
421af53f9eeba6903098d292fbd56f98be62ea6130b5161859889bf11d699d18  out/console/guardrails/cli-vuln-29-001/sample-sbom-context.json
e5aecfba5cee8d412408fb449f12fa4d5bf0a7cb7e5b316b99da3b9019897186  out/console/guardrails/cli-vuln-29-001/sample-vuln-output.ndjson
@@ -0,0 +1,9 @@
{
  "schema": "stellaops.sbom.context/1.0",
  "input": "sbom.json",
  "generated": "2025-11-19T00:00:00Z",
  "packages": [
    {"name": "openssl", "version": "1.1.1w", "purl": "pkg:deb/openssl@1.1.1w"},
    {"name": "zlib", "version": "1.2.11", "purl": "pkg:deb/zlib@1.2.11"}
  ]
}
@@ -0,0 +1 @@
{"command":"stella vuln scan","version":"0.1.0","tenant":"demo","input":"sbom.json","generated":"2025-11-19T00:00:00Z","summary":{"packages":3,"vulnerabilities":2},"vulnerabilities":[{"id":"CVE-2024-1234","package":"openssl","version":"1.1.1w","severity":"HIGH","source":"nvd","path":"/usr/lib/libssl.so"},{"id":"CVE-2024-2345","package":"zlib","version":"1.2.11","severity":"MEDIUM","source":"nvd","path":"/usr/lib/libz.so"}],"provenance":{"sbom_digest":"sha256:dummy-sbom","profile":"offline","evidence_bundle":"mirror-thin-m0-sample"}}
@@ -0,0 +1 @@
{"schema":"stellaops.sbom.context/1.0","generated":"2025-11-19T00:00:00Z","packages":[{"name":"openssl","version":"1.1.1w","purl":"pkg:deb/openssl@1.1.1w"},{"name":"zlib","version":"1.2.11","purl":"pkg:deb/zlib@1.2.11"}],"timeline":8,"dependencyPaths":5,"hash":"sha256:421af53f9eeba6903098d292fbd56f98be62ea6130b5161859889bf11d699d18"}
@@ -0,0 +1,4 @@
bb1da224c09031996224154611f2e1c2143c23b96ab583191766f7d281b20800  hashes.sha256
421af53f9eeba6903098d292fbd56f98be62ea6130b5161859889bf11d699d18  sample-sbom-context.json
e5aecfba5cee8d412408fb449f12fa4d5bf0a7cb7e5b316b99da3b9019897186  sample-vuln-output.ndjson
1f8df765be98c193ac6fa52af778e2e0ec24a7c5acbdfe7a4a461d45bf98f573  sbom-context-response.json
@@ -0,0 +1,2 @@
421af53f9eeba6903098d292fbd56f98be62ea6130b5161859889bf11d699d18  out/console/guardrails/cli-vuln-29-001/sample-sbom-context.json
e5aecfba5cee8d412408fb449f12fa4d5bf0a7cb7e5b316b99da3b9019897186  out/console/guardrails/cli-vuln-29-001/sample-vuln-output.ndjson
@@ -0,0 +1,9 @@
{
  "schema": "stellaops.sbom.context/1.0",
  "input": "sbom.json",
  "generated": "2025-11-19T00:00:00Z",
  "packages": [
    {"name": "openssl", "version": "1.1.1w", "purl": "pkg:deb/openssl@1.1.1w"},
    {"name": "zlib", "version": "1.2.11", "purl": "pkg:deb/zlib@1.2.11"}
  ]
}
@@ -0,0 +1 @@
{"command":"stella vuln scan","version":"0.1.0","tenant":"demo","input":"sbom.json","generated":"2025-11-19T00:00:00Z","summary":{"packages":3,"vulnerabilities":2},"vulnerabilities":[{"id":"CVE-2024-1234","package":"openssl","version":"1.1.1w","severity":"HIGH","source":"nvd","path":"/usr/lib/libssl.so"},{"id":"CVE-2024-2345","package":"zlib","version":"1.2.11","severity":"MEDIUM","source":"nvd","path":"/usr/lib/libz.so"}],"provenance":{"sbom_digest":"sha256:dummy-sbom","profile":"offline","evidence_bundle":"mirror-thin-m0-sample"}}
@@ -0,0 +1 @@
{"schema":"stellaops.sbom.context/1.0","generated":"2025-12-08T15:34:22.6874898+00:00","artifactId":"ghcr.io/stellaops/sample-api","purl":"pkg:npm/lodash@4.17.21","versions":[{"version":"2025.11.16.1","firstObserved":"2025-11-16T12:00:00+00:00","lastObserved":"2025-11-16T12:00:00+00:00","status":"observed","source":"scanner:surface_bundle_mock_v1.tgz","isFixAvailable":false,"metadata":{"provenance":"scanner:surface_bundle_mock_v1.tgz","digest":"sha256:112","source_bundle_hash":"sha256:bundle112"}},{"version":"2025.11.15.1","firstObserved":"2025-11-15T12:00:00+00:00","lastObserved":"2025-11-15T12:00:00+00:00","status":"observed","source":"scanner:surface_bundle_mock_v1.tgz","isFixAvailable":false,"metadata":{"provenance":"scanner:surface_bundle_mock_v1.tgz","digest":"sha256:111","source_bundle_hash":"sha256:bundle111"}}],"dependencyPaths":[{"nodes":[{"identifier":"sample-api","version":null},{"identifier":"rollup","version":null},{"identifier":"lodash","version":null}],"isRuntime":false,"source":"sbom.paths","metadata":{"environment":"prod","path_length":"3","artifact":"ghcr.io/stellaops/sample-api@sha256:111","nearest_safe_version":"pkg:npm/lodash@4.17.22","blast_radius":"low","scope":"build"}},{"nodes":[{"identifier":"sample-api","version":null},{"identifier":"express","version":null},{"identifier":"lodash","version":null}],"isRuntime":true,"source":"sbom.paths","metadata":{"environment":"prod","path_length":"3","artifact":"ghcr.io/stellaops/sample-api@sha256:111","nearest_safe_version":"pkg:npm/lodash@4.17.22","blast_radius":"medium","scope":"runtime"}}],"environmentFlags":{"prod":"2"},"blastRadius":{"impactedAssets":2,"impactedWorkloads":1,"impactedNamespaces":1,"impactedPercentage":0.5,"metadata":{"path_sample_count":"2","blast_radius_tags":"low,medium"}},"metadata":{"generated_at":"2025-12-08T15:34:22.6874898+00:00","artifact":"ghcr.io/stellaops/sample-api","version_count":"2","dependency_count":"2","source":"sbom-service","environment_flag_count":"1","blast_radius_present":"True"},"hash":"sha256:0c705259fdf984bf300baba0abf484fc3bbae977cf8a0a2d1877481f552d600d"}
devops/offline/fixtures/notifier/artifact-hashes.json (Normal file, 14 lines)
@@ -0,0 +1,14 @@
{
  "hash_algorithm": "blake3-256",
  "entries": [
    { "path": "docs/notifications/schemas/notify-schemas-catalog.json", "digest": "34e8655b0c7ca70c844d4b9aee56bdd7bd30b6a8666d2af75a70856b16f5605d" },
    { "path": "docs/notifications/schemas/notify-schemas-catalog.dsse.json", "digest": "7c537ff728312cefb0769568bd376adc2bd79f6926173bf21f50c873902133dc" },
    { "path": "docs/notifications/gaps-nr1-nr10.md", "digest": "b889dfd19a9d0a0f7bafb958135fde151e63c1e5259453d592d6519ae1667819" },
    { "path": "docs/notifications/fixtures/rendering/index.ndjson", "digest": "3a41e62687b6e04f50e86ea74706eeae28eef666d7c4dbb5dc2281e6829bf41a" },
    { "path": "docs/notifications/fixtures/redaction/sample.json", "digest": "dd4eefc8dded5d6f46c832e959ba0eef95ee8b77f10ac0aae90f7c89ad42906c" },
    { "path": "docs/notifications/operations/dashboards/notify-slo.json", "digest": "8b380cb5491727a3ec69d50789f5522ac66c97804bebbf7de326568e52b38fa9" },
    { "path": "docs/notifications/operations/alerts/notify-slo-alerts.yaml", "digest": "2c3b702c42d3e860c7f4e51d577f77961e982e1d233ef5ec392cba5414a0056d" },
    { "path": "offline/notifier/notify-kit.manifest.json", "digest": "15e0b2f670e6b8089c6c960e354f16ba8201d993a077a28794a30b8d1cb23e9a" },
    { "path": "offline/notifier/notify-kit.manifest.dsse.json", "digest": "68742f4e5bd202afe2cc90964d51fea7971395f3e57a875ae7111dcbb760321e" }
  ]
}
@@ -0,0 +1,11 @@
{
  "payloadType": "application/vnd.notify.manifest+json",
"payload": "ewogICJzY2hlbWFfdmVyc2lvbiI6ICJ2MS4wIiwKICAiZ2VuZXJhdGVkX2F0IjogIjIwMjUtMTItMDRUMDA6MDA6MDBaIiwKICAidGVuYW50X3Njb3BlIjogIioiLAogICJlbnZpcm9ubWVudCI6ICJvZmZsaW5lIiwKICAiYXJ0aWZhY3RzIjogWwogICAgeyAibmFtZSI6ICJzY2hlbWEtY2F0YWxvZyIsICJwYXRoIjogImRvY3Mvbm90aWZpY2F0aW9ucy9zY2hlbWFzL25vdGlmeS1zY2hlbWFzLWNhdGFsb2cuanNvbiIsICJkaWdlc3QiOiAiMzRlODY1NWIwYzdjYTcwYzg0NGQ0YjlhZWU1NmJkZDdiZDMwYjZhODY2NmQyYWY3NWE3MDg1NmIxNmY1NjA1ZCIgfSwKICAgIHsgIm5hbWUiOiAic2NoZW1hLWNhdGFsb2ctZHNzZSIsICJwYXRoIjogImRvY3Mvbm90aWZpY2F0aW9ucy9zY2hlbWFzL25vdGlmeS1zY2hlbWFzLWNhdGFsb2cuZHNzZS5qc29uIiwgImRpZ2VzdCI6ICI3YzUzN2ZmNzI4MzEyY2VmYjA3Njk1NjhiZDM3NmFkYzJiZDc5ZjY5MjYxNzNiZjIxZjUwYzg3MzkwMjEzM2RjIiB9LAogICAgeyAibmFtZSI6ICJydWxlcyIsICJwYXRoIjogImRvY3Mvbm90aWZpY2F0aW9ucy9nYXBzLW5yMS1ucjEwLm1kIiwgImRpZ2VzdCI6ICJiODg5ZGZkMTlhOWQwYTBmN2JhZmI5NTgxMzVmZGUxNTFlNjNjMWU1MjU5NDUzZDU5MmQ2NTE5YWUxNjY3ODE5IiB9LAogICAgeyAibmFtZSI6ICJmaXh0dXJlcy1yZW5kZXJpbmciLCAicGF0aCI6ICJkb2NzL25vdGlmaWNhdGlvbnMvZml4dHVyZXMvcmVuZGVyaW5nL2luZGV4Lm5kanNvbiIsICJkaWdlc3QiOiAiM2E0MWU2MjY4N2I2ZTA0ZjUwZTg2ZWE3NDcwNmVlYWUyOGVlZjY2NmQ3YzRkYmI1ZGMyMjgxZTY4MjliZjQxYSIgfSwKICAgIHsgIm5hbWUiOiAiZml4dHVyZXMtcmVkYWN0aW9uIiwgInBhdGgiOiAiZG9jcy9ub3RpZmljYXRpb25zL2ZpeHR1cmVzL3JlZGFjdGlvbi9zYW1wbGUuanNvbiIsICJkaWdlc3QiOiAiZGQ0ZWVmYzhkZGVkNWQ2ZjQ2YzgzMmU5NTliYTBlZWY5NWVlOGI3N2YxMGFjMGFhZTkwZjdjODlhZDQyOTA2YyIgfSwKICAgIHsgIm5hbWUiOiAiZGFzaGJvYXJkcyIsICJwYXRoIjogImRvY3Mvbm90aWZpY2F0aW9ucy9vcGVyYXRpb25zL2Rhc2hib2FyZHMvbm90aWZ5LXNsby5qc29uIiwgImRpZ2VzdCI6ICI4YjM4MGNiNTQ5MTcyN2EzZWM2OWQ1MDc4OWY1NTIyYWM2NmM5NzgwNGJlYmJmN2RlMzI2NTY4ZTUyYjM4ZmE5IiB9LAogICAgeyAibmFtZSI6ICJhbGVydHMiLCAicGF0aCI6ICJkb2NzL25vdGlmaWNhdGlvbnMvb3BlcmF0aW9ucy9hbGVydHMvbm90aWZ5LXNsby1hbGVydHMueWFtbCIsICJkaWdlc3QiOiAiMmMzYjcwMmM0MmQzZTg2MGM3ZjRlNTFkNTc3Zjc3OTYxZTk4MmUxZDIzM2VmNWVjMzkyY2JhNTQxNGEwMDU2ZCIgfQogIF0sCiAgImhhc2hfYWxnb3JpdGhtIjogImJsYWtlMy0yNTYiLAogICJjYW5vbmljYWxpemF0aW9uIjogImpzb24tbm9ybWFsaXplZC11dGY4Igp9Cg==",
  "signatures": [
    {
      "sig": "DZwohxh6AOAP7Qf9geoZjw2jTXVU3rR8sYw4mgKpMu0=",
      "keyid": "notify-dev-hmac-001",
      "signedAt": "2025-12-04T21:13:10+00:00"
    }
  ]
}
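The DSSE payload is plain base64-encoded JSON, so it can be inspected locally (the file path assumes the envelope sits next to the other notifier fixtures):

```bash
jq -r '.payload' devops/offline/fixtures/notifier/notify-kit.manifest.dsse.json | base64 -d | jq .
```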
devops/offline/fixtures/notifier/notify-kit.manifest.json (Normal file, 17 lines)
@@ -0,0 +1,17 @@
{
  "schema_version": "v1.0",
  "generated_at": "2025-12-04T00:00:00Z",
  "tenant_scope": "*",
  "environment": "offline",
  "artifacts": [
    { "name": "schema-catalog", "path": "docs/notifications/schemas/notify-schemas-catalog.json", "digest": "34e8655b0c7ca70c844d4b9aee56bdd7bd30b6a8666d2af75a70856b16f5605d" },
    { "name": "schema-catalog-dsse", "path": "docs/notifications/schemas/notify-schemas-catalog.dsse.json", "digest": "7c537ff728312cefb0769568bd376adc2bd79f6926173bf21f50c873902133dc" },
    { "name": "rules", "path": "docs/notifications/gaps-nr1-nr10.md", "digest": "b889dfd19a9d0a0f7bafb958135fde151e63c1e5259453d592d6519ae1667819" },
    { "name": "fixtures-rendering", "path": "docs/notifications/fixtures/rendering/index.ndjson", "digest": "3a41e62687b6e04f50e86ea74706eeae28eef666d7c4dbb5dc2281e6829bf41a" },
    { "name": "fixtures-redaction", "path": "docs/notifications/fixtures/redaction/sample.json", "digest": "dd4eefc8dded5d6f46c832e959ba0eef95ee8b77f10ac0aae90f7c89ad42906c" },
    { "name": "dashboards", "path": "docs/notifications/operations/dashboards/notify-slo.json", "digest": "8b380cb5491727a3ec69d50789f5522ac66c97804bebbf7de326568e52b38fa9" },
    { "name": "alerts", "path": "docs/notifications/operations/alerts/notify-slo-alerts.yaml", "digest": "2c3b702c42d3e860c7f4e51d577f77961e982e1d233ef5ec392cba5414a0056d" }
  ],
  "hash_algorithm": "blake3-256",
  "canonicalization": "json-normalized-utf8"
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-expiry-warning-email-en-us",
  "tenantId": "bootstrap", "channelType": "email", "key": "tmpl-attest-expiry-warning", "locale": "en-us", "renderMode": "html", "format": "email",
  "description": "Expiry warning for attestations approaching their expiration window.",
"body": "<h2>Attestation expiry notice</h2>\n<p>The attestation for <code>{{payload.subject.repository}}</code> (digest {{payload.subject.digest}}) expires on <strong>{{payload.attestation.expiresAt}}</strong>.</p>\n<ul>\n <li>Issued: {{payload.attestation.issuedAt}}</li>\n <li>Signer: <code>{{payload.signer.kid}}</code> ({{payload.signer.algorithm}})</li>\n <li>Time remaining: {{expires_in payload.attestation.expiresAt event.ts}}</li>\n</ul>\n<p>Please rotate the attestation before expiry using <a href=\"{{payload.links.docs}}\">these instructions</a>.</p>\n<p>Console: <a href=\"{{payload.links.console}}\">{{payload.links.console}}</a></p>\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-12" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-expiry-warning-slack-en-us",
  "tenantId": "bootstrap", "channelType": "slack", "key": "tmpl-attest-expiry-warning", "locale": "en-us", "renderMode": "markdown", "format": "slack",
  "description": "Slack reminder for attestations approaching their expiration window.",
"body": ":warning: Attestation for `{{payload.subject.digest}}` expires {{expires_in payload.attestation.expiresAt event.ts}}\nRepo: `{{payload.subject.repository}}`{{#if payload.subject.tag}} ({{payload.subject.tag}}){{/if}}\nSigner: `{{fingerprint payload.signer.kid}}` ({{payload.signer.algorithm}})\nIssued: {{payload.attestation.issuedAt}} · Expires: {{payload.attestation.expiresAt}}\nRenewal steps: {{link \"Docs\" payload.links.docs}} · Console: {{link \"Open\" payload.links.console}}\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-16" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-key-rotation-email-en-us",
  "tenantId": "bootstrap", "channelType": "email", "key": "tmpl-attest-key-rotation", "locale": "en-us", "renderMode": "html", "format": "email",
  "description": "Email bulletin for attestation key rotation or revocation events.",
"body": "<h2>Attestation key rotation notice</h2>\n<p>Authority rotated or revoked signing keys at {{payload.rotation.executedAt}}.</p>\n<ul>\n <li>Rotation batch: {{payload.rotation.batchId}}</li>\n <li>Impacted services: {{payload.rotation.impactedServices}}</li>\n <li>Reason: {{payload.rotation.reason}}</li>\n</ul>\n<p>Recommended action: {{payload.recommendation}}</p>\n<p>Docs: <a href=\"{{payload.links.docs}}\">Rotation playbook</a></p>\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-12" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-key-rotation-webhook-en-us",
  "tenantId": "bootstrap", "channelType": "webhook", "key": "tmpl-attest-key-rotation", "locale": "en-us", "renderMode": "json", "format": "webhook",
  "description": "Webhook payload for attestation key rotation/revocation events.",
"body": "{\n \"event\": \"authority.keys.rotated\",\n \"tenantId\": \"{{event.tenant}}\",\n \"batchId\": \"{{payload.rotation.batchId}}\",\n \"executedAt\": \"{{payload.rotation.executedAt}}\",\n \"impactedServices\": \"{{payload.rotation.impactedServices}}\",\n \"reason\": \"{{payload.rotation.reason}}\",\n \"links\": {\n \"docs\": \"{{payload.links.docs}}\",\n \"console\": \"{{payload.links.console}}\"\n }\n}\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-12" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-transparency-anomaly-slack-en-us",
  "tenantId": "bootstrap", "channelType": "slack", "key": "tmpl-attest-transparency-anomaly", "locale": "en-us", "renderMode": "markdown", "format": "slack",
  "description": "Slack alert for transparency witness anomalies.",
"body": ":warning: Transparency anomaly detected for `{{payload.subject.digest}}`\nWitness: `{{payload.transparency.witnessId}}` ({{payload.transparency.classification}})\nRekor index: {{payload.transparency.rekorIndex}}\nAnomaly window: {{payload.transparency.windowStart}} → {{payload.transparency.windowEnd}}\nRecommended action: {{payload.recommendation}}\nConsole details: {{link \"Open in Console\" payload.links.console}}\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-12" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-transparency-anomaly-webhook-en-us",
  "tenantId": "bootstrap", "channelType": "webhook", "key": "tmpl-attest-transparency-anomaly", "locale": "en-us", "renderMode": "json", "format": "webhook",
  "description": "Webhook payload for Rekor transparency anomalies.",
"body": "{\n \"event\": \"attestor.transparency.anomaly\",\n \"tenantId\": \"{{event.tenant}}\",\n \"subjectDigest\": \"{{payload.subject.digest}}\",\n \"witnessId\": \"{{payload.transparency.witnessId}}\",\n \"classification\": \"{{payload.transparency.classification}}\",\n \"rekorIndex\": {{payload.transparency.rekorIndex}},\n \"window\": {\n \"start\": \"{{payload.transparency.windowStart}}\",\n \"end\": \"{{payload.transparency.windowEnd}}\"\n },\n \"links\": {\n \"console\": \"{{payload.links.console}}\",\n \"rekor\": \"{{payload.links.rekor}}\"\n },\n \"recommendation\": \"{{payload.recommendation}}\"\n}\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-12" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-verify-fail-email-en-us",
  "tenantId": "bootstrap", "channelType": "email", "key": "tmpl-attest-verify-fail", "locale": "en-us", "renderMode": "html", "format": "email",
  "description": "Email notice for attestation verification failures.",
"body": "<h2>Attestation verification failure</h2>\n<p>The attestation for <code>{{payload.subject.repository}}</code> (digest {{payload.subject.digest}}) failed verification at {{event.ts}}.</p>\n<ul>\n <li>Reason: <code>{{payload.failure.reasonCode}}</code> — {{payload.failure.reason}}</li>\n <li>Signer: <code>{{payload.signer.kid}}</code> ({{payload.signer.algorithm}})</li>\n <li>Rekor entry: <a href=\"{{payload.links.rekor}}\">{{payload.links.rekor}}</a></li>\n <li>Last valid attestation: <a href=\"{{payload.links.console}}\">Console report</a></li>\n</ul>\n<p>{{payload.recommendation}}</p>\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-12" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-verify-fail-slack-en-us",
  "tenantId": "bootstrap", "channelType": "slack", "key": "tmpl-attest-verify-fail", "locale": "en-us", "renderMode": "markdown", "format": "slack",
  "description": "Slack alert for attestation verification failures with Rekor traceability.",
"body": ":rotating_light: {{attestation_status_badge payload.failure.status}} verification failed for `{{payload.subject.digest}}`\nSigner: `{{fingerprint payload.signer.kid}}` ({{payload.signer.algorithm}})\nReason: `{{payload.failure.reasonCode}}` — {{payload.failure.reason}}\nLast valid attestation: {{link \"Console\" payload.links.console}}\nRekor entry: {{link \"Transparency log\" payload.links.rekor}}\nRecommended action: {{payload.recommendation}}\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-12" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-attest-verify-fail-webhook-en-us",
  "tenantId": "bootstrap", "channelType": "webhook", "key": "tmpl-attest-verify-fail", "locale": "en-us", "renderMode": "json", "format": "webhook",
  "description": "JSON payload for Pager/SOC integrations on attestation verification failures.",
"body": "{\n \"event\": \"attestor.verification.failed\",\n \"tenantId\": \"{{event.tenant}}\",\n \"subjectDigest\": \"{{payload.subject.digest}}\",\n \"repository\": \"{{payload.subject.repository}}\",\n \"reasonCode\": \"{{payload.failure.reasonCode}}\",\n \"reason\": \"{{payload.failure.reason}}\",\n \"signer\": {\n \"kid\": \"{{payload.signer.kid}}\",\n \"algorithm\": \"{{payload.signer.algorithm}}\"\n },\n \"rekor\": {\n \"url\": \"{{payload.links.rekor}}\",\n \"uuid\": \"{{payload.rekor.uuid}}\",\n \"index\": {{payload.rekor.index}}\n },\n \"recommendation\": \"{{payload.recommendation}}\"\n}\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-12" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-api-deprecation-email-en-us",
  "tenantId": "bootstrap", "channelType": "email", "key": "tmpl-api-deprecation", "locale": "en-us", "renderMode": "html", "format": "email",
  "description": "Email notification for retiring Notifier API versions.",
"body": "<h2>Notifier API deprecation notice</h2>\n<p>The Notifier API v1 endpoints are scheduled for sunset on <strong>{{metadata.sunset}}</strong>.</p>\n<ul>\n <li>Paths affected: {{metadata.paths}}</li>\n <li>Scope: notify.*</li>\n <li>Replacement: {{metadata.replacement}}</li>\n</ul>\n<p>Action: {{metadata.action}}</p>\n<p>Details: <a href=\"{{metadata.docs}}\">Deprecation bulletin</a></p>\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-17" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-api-deprecation-slack-en-us",
  "tenantId": "bootstrap", "channelType": "slack", "key": "tmpl-api-deprecation", "locale": "en-us", "renderMode": "markdown", "format": "slack",
  "description": "Slack notice for retiring Notifier API versions.",
"body": ":warning: Notifier API v1 is being deprecated.\nSunset: {{metadata.sunset}}\nPaths affected: {{metadata.paths}}\nDocs: {{link \"Deprecation details\" metadata.docs}}\nAction: {{metadata.action}}\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-17" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-risk-profile-state-email-en-us",
  "tenantId": "bootstrap", "channelType": "email", "key": "tmpl-risk-profile-state", "locale": "en-us", "renderMode": "html", "format": "email",
  "description": "Email notice when risk profiles are published, deprecated, or thresholds change.",
"body": "<h2>Risk profile update</h2>\n<p>Profile <strong>{{payload.profile.id}}</strong> is now <strong>{{payload.state}}</strong> (version {{payload.profile.version}}).</p>\n<ul>\n <li>Thresholds: {{payload.thresholds}}</li>\n <li>Owner: {{payload.owner}}</li>\n <li>Effective at: {{payload.effectiveAt}}</li>\n</ul>\n<p>Notes: {{payload.notes}}</p>\n<p>Console: <a href=\"{{payload.links.console}}\">View profile</a></p>\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-24" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-risk-profile-state-slack-en-us",
  "tenantId": "bootstrap", "channelType": "slack", "key": "tmpl-risk-profile-state", "locale": "en-us", "renderMode": "markdown", "format": "json",
  "description": "Slack notice when risk profiles publish, deprecate, or thresholds change.",
"body": "*Risk profile {{payload.profile.id}}* is now *{{payload.state}}* (v{{payload.profile.version}})\n• thresholds: {{payload.thresholds}}\n• owner: {{payload.owner}}\n• effective: {{payload.effectiveAt}}\n<{{payload.links.console}}|View profile>",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-24" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-risk-severity-change-email-en-us",
  "tenantId": "bootstrap", "channelType": "email", "key": "tmpl-risk-severity-change", "locale": "en-us", "renderMode": "html", "format": "email",
  "description": "Email notice for risk severity escalation or downgrade.",
"body": "<h2>Risk severity updated</h2>\n<p>Risk profile <strong>{{payload.profile.id}}</strong> changed severity from {{payload.previous.severity}} to {{payload.current.severity}} at {{event.ts}}.</p>\n<ul>\n <li>Asset: {{payload.asset.purl}}</li>\n <li>Profile version: {{payload.profile.version}}</li>\n <li>Reason: {{payload.reason}}</li>\n</ul>\n<p>View details: <a href=\"{{payload.links.console}}\">Console</a></p>\n",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-24" }
}
@@ -0,0 +1,16 @@
{
  "schemaVersion": "notify.template@1",
  "templateId": "tmpl-risk-severity-change-slack-en-us",
  "tenantId": "bootstrap", "channelType": "slack", "key": "tmpl-risk-severity-change", "locale": "en-us", "renderMode": "markdown", "format": "json",
  "description": "Slack notice for risk severity escalation or downgrade.",
"body": "*Risk severity changed* for {{payload.profile.id}}\n• from: {{payload.previous.severity}} → to: {{payload.current.severity}}\n• asset: {{payload.asset.purl}}\n• version: {{payload.profile.version}}\n• reason: {{payload.reason}}\n<{{payload.links.console}}|Open in console>",
  "metadata": { "author": "notifications-bootstrap", "version": "2025-11-24" }
}
devops/offline/fixtures/notifier/verify_notify_kit.sh (Normal file, 56 lines)
@@ -0,0 +1,56 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT=$(cd "$(dirname "$0")" && pwd)

missing=0
for f in notify-kit.manifest.json notify-kit.manifest.dsse.json artifact-hashes.json; do
  if [ ! -f "$ROOT/$f" ]; then
    echo "[FAIL] missing $f" >&2
    missing=1
  fi
done

if [ "$missing" -ne 0 ]; then
  exit 1
fi

python - <<'PY'
import json, sys, pathlib, base64
try:
    import blake3
except ImportError:
    sys.stderr.write("blake3 module missing; install with `python -m pip install blake3`\n")
    sys.exit(1)

if '__file__' in globals() and __file__ not in (None, '<stdin>'):
    root = pathlib.Path(__file__).resolve().parent
else:
    root = pathlib.Path.cwd()
hashes = json.loads((root / "artifact-hashes.json").read_text())

def h(path: pathlib.Path):
    if path.suffix == ".json":
        data = json.dumps(json.loads(path.read_text()), sort_keys=True, separators=(',', ':')).encode()
    else:
        data = path.read_bytes()
    return blake3.blake3(data).hexdigest()

ok = True
for entry in hashes["entries"]:
    path = root.parent.parent / entry["path"]
    digest = entry["digest"]
    if not path.exists():
        sys.stderr.write(f"[FAIL] missing file {path}\n")
        ok = False
        continue
    actual = h(path)
    if actual != digest:
        sys.stderr.write(f"[FAIL] digest mismatch {path}: expected {digest}, got {actual}\n")
        ok = False

if not ok:
    sys.exit(1)

print("[OK] All artifact hashes verified with blake3.")
PY
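Typical invocation, matching the dependency hint the script prints when `blake3` is absent:

```bash
python -m pip install blake3
./devops/offline/fixtures/notifier/verify_notify_kit.sh
```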
@@ -0,0 +1,39 @@
groups:
  - name: ledger-observability
    interval: 30s
    rules:
      - alert: LedgerWriteLatencyHighP95
        expr: histogram_quantile(0.95, sum(rate(ledger_write_latency_seconds_bucket[5m])) by (le, tenant)) > 0.12
        for: 10m
        labels:
          severity: warning
        annotations:
          summary: "Ledger write latency p95 high (tenant {{ $labels.tenant }})"
          description: "ledger_write_latency_seconds p95 > 120ms for >10m. Check DB/queue."

      - alert: ProjectionLagHigh
        expr: max_over_time(ledger_projection_lag_seconds[10m]) > 30
        for: 10m
        labels:
          severity: critical
        annotations:
          summary: "Ledger projection lag high"
          description: "projection lag over 30s; projections falling behind ingest."

      - alert: MerkleAnchorFailures
        expr: sum(rate(ledger_merkle_anchor_failures_total[15m])) by (tenant, reason) > 0
        for: 15m
        labels:
          severity: critical
        annotations:
          summary: "Merkle anchor failures (tenant {{ $labels.tenant }})"
          description: "Anchoring failures detected (reason={{ $labels.reason }}). Investigate signing/storage."

      - alert: AttachmentFailures
        expr: sum(rate(ledger_attachments_encryption_failures_total[10m])) by (tenant, stage) > 0
        for: 10m
        labels:
          severity: warning
        annotations:
          summary: "Attachment pipeline failures (tenant {{ $labels.tenant }}, stage {{ $labels.stage }})"
          description: "Attachment encryption/sign/upload reported failures in the last 10m."
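The rules can be statically validated before they are loaded into Prometheus (the file name below is an assumption; use whatever path this file lands on):

```bash
promtool check rules devops/observability/ledger-alerts.yaml
```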
@@ -0,0 +1,91 @@
{
  "id": null,
  "title": "StellaOps Findings Ledger",
  "timezone": "utc",
  "schemaVersion": 39,
  "version": 1,
  "refresh": "30s",
  "tags": ["ledger", "findings", "stellaops"],
  "panels": [
    {
      "type": "timeseries",
      "title": "Ledger Write Latency (P50/P95)",
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 0 },
      "targets": [
        { "expr": "histogram_quantile(0.5, sum(rate(ledger_write_latency_seconds_bucket{tenant=\"$tenant\"}[5m])) by (le))", "legendFormat": "p50" },
        { "expr": "histogram_quantile(0.95, sum(rate(ledger_write_latency_seconds_bucket{tenant=\"$tenant\"}[5m])) by (le))", "legendFormat": "p95" }
      ],
      "fieldConfig": { "defaults": { "unit": "s" } }
    },
    {
      "type": "timeseries",
      "title": "Write Throughput",
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 0 },
      "targets": [
        { "expr": "sum(rate(ledger_events_total{tenant=\"$tenant\"}[5m])) by (event_type)", "legendFormat": "{{event_type}}" }
      ],
      "fieldConfig": { "defaults": { "unit": "ops" } }
    },
    {
      "type": "timeseries",
      "title": "Projection Lag",
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 8 },
      "targets": [
        { "expr": "max(ledger_projection_lag_seconds{tenant=\"$tenant\"})", "legendFormat": "lag" }
      ],
      "fieldConfig": { "defaults": { "unit": "s" } }
    },
    {
      "type": "timeseries",
      "title": "Merkle Anchor Duration",
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 8 },
      "targets": [
        { "expr": "histogram_quantile(0.95, sum(rate(ledger_merkle_anchor_duration_seconds_bucket{tenant=\"$tenant\"}[5m])) by (le))", "legendFormat": "p95" }
      ],
      "fieldConfig": { "defaults": { "unit": "s" } }
    },
    {
      "type": "stat",
      "title": "Merkle Anchor Failures (5m)",
      "gridPos": { "h": 4, "w": 6, "x": 0, "y": 16 },
      "targets": [
        { "expr": "sum(rate(ledger_merkle_anchor_failures_total{tenant=\"$tenant\"}[5m]))", "legendFormat": "fail/s" }
      ],
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] } }
    },
    {
      "type": "stat",
      "title": "Attachment Failures (5m)",
      "gridPos": { "h": 4, "w": 6, "x": 6, "y": 16 },
      "targets": [
        { "expr": "sum(rate(ledger_attachments_encryption_failures_total{tenant=\"$tenant\"}[5m])) by (stage)", "legendFormat": "{{stage}}" }
      ],
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] } }
    },
    {
      "type": "stat",
      "title": "Ledger Backlog",
      "gridPos": { "h": 4, "w": 6, "x": 12, "y": 16 },
      "targets": [
        { "expr": "sum(ledger_ingest_backlog_events{tenant=\"$tenant\"})", "legendFormat": "events" }
      ]
    }
  ],
  "templating": {
    "list": [
      {
        "name": "tenant",
        "type": "query",
        "label": "Tenant",
        "datasource": null,
        "query": "label_values(ledger_events_total, tenant)",
        "refresh": 1,
        "multi": false,
        "includeAll": false
      }
    ]
  },
  "annotations": { "list": [] },
  "time": { "from": "now-6h", "to": "now" },
  "timepicker": { "refresh_intervals": ["30s", "1m", "5m", "15m", "1h"] }
}
6
devops/offline/templates/mirror-thin-v1.manifest.json
Normal file
@@ -0,0 +1,6 @@
{
  "created": "$CREATED",
  "indexes": [],
  "layers": [],
  "version": "1.0.0"
}
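The "$CREATED" value is a placeholder, presumably filled in when the offline mirror is assembled. One way to instantiate the template; the output path is an assumption and string.Template is just a convenient match for the $NAME syntax:

# Sketch: fill the $CREATED placeholder with an RFC 3339 UTC timestamp.
import json
from datetime import datetime, timezone
from pathlib import Path
from string import Template

template = Template(Path("devops/offline/templates/mirror-thin-v1.manifest.json").read_text())
rendered = template.substitute(CREATED=datetime.now(timezone.utc).isoformat())
json.loads(rendered)  # fail fast if substitution broke the JSON
Path("mirror-thin-v1.manifest.json").write_text(rendered)  # illustrative output path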
130
devops/scripts/add-testkit-reference.py
Normal file
@@ -0,0 +1,130 @@
#!/usr/bin/env python3
"""
Adds StellaOps.TestKit ProjectReference to test projects that use TestCategories
but are missing the reference.
"""

import os
import re
import sys
from pathlib import Path


def get_relative_path_to_testkit(csproj_path: Path) -> str:
    """Calculate relative path from csproj to TestKit project."""
    # TestKit is at src/__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj
    csproj_dir = csproj_path.parent
    src_root = None

    # Walk up to find src directory
    current = csproj_dir
    depth = 0
    while current.name != 'src' and depth < 10:
        current = current.parent
        depth += 1

    if current.name == 'src':
        src_root = current
    else:
        return None

    # Calculate relative path from csproj to src/__Libraries/StellaOps.TestKit
    rel_path = os.path.relpath(
        src_root / '__Libraries' / 'StellaOps.TestKit' / 'StellaOps.TestKit.csproj',
        csproj_dir
    )
    # Normalize to forward slashes for XML
    return rel_path.replace('\\', '/')


def project_uses_testkit(csproj_dir: Path) -> bool:
    """Check if any .cs file in the project directory uses TestCategories."""
    for cs_file in csproj_dir.rglob('*.cs'):
        if '/obj/' in str(cs_file) or '/bin/' in str(cs_file):
            continue
        try:
            content = cs_file.read_text(encoding='utf-8-sig', errors='ignore')
            if 'TestCategories.' in content:
                return True
        except Exception:
            pass
    return False


def project_has_testkit_reference(content: str) -> bool:
    """Check if csproj already references TestKit."""
    return 'StellaOps.TestKit' in content


def add_testkit_reference(csproj_path: Path, dry_run: bool = False) -> bool:
    """Add TestKit reference to csproj if needed."""
    try:
        content = csproj_path.read_text(encoding='utf-8')
    except Exception as e:
        print(f"  Error reading {csproj_path}: {e}", file=sys.stderr)
        return False

    if project_has_testkit_reference(content):
        return False

    if not project_uses_testkit(csproj_path.parent):
        return False

    rel_path = get_relative_path_to_testkit(csproj_path)
    if not rel_path:
        print(f"  Could not determine path to TestKit from {csproj_path}", file=sys.stderr)
        return False

    # Find a good place to insert the reference - look for existing ProjectReference
    if '<ProjectReference' in content:
        # Insert before the last </ItemGroup> that contains ProjectReference
        pattern = r'( <ProjectReference [^>]+/>\s*\n)( </ItemGroup>)'
        replacement = f'\\1 <ProjectReference Include="{rel_path}" />\n\\2'
        fixed = re.sub(pattern, replacement, content, count=1)
    else:
        # No ProjectReference, add a new ItemGroup before </Project>
        pattern = r'(</Project>)'
        new_item_group = f''' <ItemGroup>
    <ProjectReference Include="{rel_path}" />
  </ItemGroup>
\\1'''
        fixed = re.sub(pattern, new_item_group, content)

    if fixed == content:
        print(f"  Could not find insertion point in {csproj_path}", file=sys.stderr)
        return False

    if not dry_run:
        csproj_path.write_text(fixed, encoding='utf-8')

    return True


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Add TestKit reference to test projects')
    parser.add_argument('--path', default='src', help='Path to scan')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be fixed')
    args = parser.parse_args()

    root = Path(args.path)
    fixed_count = 0

    # Find all test project files
    for csproj in root.rglob('*.Tests.csproj'):
        if add_testkit_reference(csproj, dry_run=args.dry_run):
            print(f"{'Would add' if args.dry_run else 'Added'} TestKit reference to: {csproj}")
            fixed_count += 1

    # Also check *UnitTests, *SmokeTests, etc.
    for pattern in ['*UnitTests.csproj', '*IntegrationTests.csproj', '*SmokeTests.csproj', '*FixtureTests.csproj']:
        for csproj in root.rglob(pattern):
            if add_testkit_reference(csproj, dry_run=args.dry_run):
                print(f"{'Would add' if args.dry_run else 'Added'} TestKit reference to: {csproj}")
                fixed_count += 1

    print(f"\nAdded TestKit reference to {fixed_count} projects")


if __name__ == '__main__':
    main()
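Because the reference path is computed relative to each test project, the nesting depth of the project determines how many ../ segments appear in the inserted Include. A small demonstration of the same os.path.relpath call; the project location is hypothetical:

# Sketch: what get_relative_path_to_testkit() yields for a hypothetical project
# located at src/Scanner/__Tests/Scanner.Core.Tests/Scanner.Core.Tests.csproj.
import os

csproj_dir = "src/Scanner/__Tests/Scanner.Core.Tests"  # hypothetical
testkit = "src/__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj"
print(os.path.relpath(testkit, csproj_dir).replace(os.sep, "/"))
# -> ../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj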
69
devops/scripts/fix-misplaced-using.ps1
Normal file
@@ -0,0 +1,69 @@
#!/usr/bin/env pwsh
<#
.SYNOPSIS
    Fixes misplaced 'using StellaOps.TestKit;' statements in test files.

.DESCRIPTION
    The validate-test-traits.py --fix script has a bug that inserts
    'using StellaOps.TestKit;' after 'using var' statements inside methods,
    causing compilation errors.

    This script:
    1. Finds all affected .cs files
    2. Removes the misplaced 'using StellaOps.TestKit;' lines
    3. Ensures 'using StellaOps.TestKit;' exists at the top of the file
#>

param(
    [string]$Path = "src",
    [switch]$DryRun
)

$ErrorActionPreference = "Stop"

# Pattern to find misplaced using statements (after 'using var' in method body)
$brokenPattern = "(?m)^(\s*using var .+;\s*\r?\n)(using StellaOps\.TestKit;\s*\r?\n)"

# Counter for fixed files
$fixedCount = 0
$checkedCount = 0

# Get all .cs test files
$files = Get-ChildItem -Path $Path -Recurse -Include "*.cs" |
    Where-Object { $_.FullName -match "Tests?" }

foreach ($file in $files) {
    $checkedCount++
    $content = Get-Content -Path $file.FullName -Raw -Encoding UTF8

    # Check if file has the broken pattern
    if ($content -match $brokenPattern) {
        Write-Host "Fixing: $($file.FullName)" -ForegroundColor Yellow

        # Remove all misplaced 'using StellaOps.TestKit;' lines
        $fixed = $content -replace $brokenPattern, '$1'

        # Check if 'using StellaOps.TestKit;' exists at the top of the file (in the using block)
        $hasTopUsing = $fixed -match "(?m)^using StellaOps\.TestKit;\s*$"

        if (-not $hasTopUsing) {
            # Find the last 'using' statement at the top of the file and add after it
            $fixed = $fixed -replace "(?m)(^using [^;]+;\s*\r?\n)(?!using)", "`$1using StellaOps.TestKit;`r`n"
        }

        if (-not $DryRun) {
            # Preserve BOM if original file had one
            $encoding = [System.Text.UTF8Encoding]::new($true)
            [System.IO.File]::WriteAllText($file.FullName, $fixed, $encoding)
        }

        $fixedCount++
    }
}

Write-Host "`nChecked: $checkedCount files" -ForegroundColor Cyan
Write-Host "Fixed: $fixedCount files" -ForegroundColor Green

if ($DryRun) {
    Write-Host "`n(Dry run - no files were modified)" -ForegroundColor Magenta
}
109
devops/scripts/fix-misplaced-using.py
Normal file
@@ -0,0 +1,109 @@
#!/usr/bin/env python3
"""
Fixes misplaced 'using StellaOps.TestKit;' statements in test files.

The validate-test-traits.py --fix script has a bug that inserts
'using StellaOps.TestKit;' after 'using var' statements inside methods,
causing CS1001 compilation errors.

This script:
1. Finds all affected .cs files
2. Removes the misplaced 'using StellaOps.TestKit;' lines (inside methods)
3. Ensures 'using StellaOps.TestKit;' exists at the top of the file
"""

import os
import re
import sys
from pathlib import Path


def fix_file(file_path: Path, dry_run: bool = False) -> bool:
    """Fix a single file by removing misplaced using statements."""
    try:
        content = file_path.read_text(encoding='utf-8-sig')  # Handle BOM
    except Exception as e:
        print(f"  Error reading {file_path}: {e}", file=sys.stderr)
        return False

    original = content

    # Pattern to find 'using var' followed by 'using StellaOps.TestKit;' (bug)
    # This matches the broken pattern inside method bodies
    broken_pattern = re.compile(
        r'(using var [^;]+;\s*\n)(using StellaOps\.TestKit;\s*\n)',
        re.MULTILINE
    )

    # Check if file has the broken pattern
    if not broken_pattern.search(content):
        return False

    # Remove all misplaced 'using StellaOps.TestKit;' lines after 'using var'
    fixed = broken_pattern.sub(r'\1', content)

    # Check if 'using StellaOps.TestKit;' exists at top of file (before namespace)
    namespace_match = re.search(r'^namespace\s+\w+', fixed, re.MULTILINE)
    if namespace_match:
        top_section = fixed[:namespace_match.start()]
        has_top_using = 'using StellaOps.TestKit;' in top_section

        if not has_top_using:
            # Find the last 'using' statement before namespace and add after it
            last_using = None
            for match in re.finditer(r'^using [^;]+;\s*$', top_section, re.MULTILINE):
                last_using = match

            if last_using:
                insert_pos = last_using.end()
                fixed = fixed[:insert_pos] + '\nusing StellaOps.TestKit;' + fixed[insert_pos:]

    if fixed != original:
        if not dry_run:
            # Preserve UTF-8 BOM if present
            encoding = 'utf-8-sig' if content.startswith('\ufeff') else 'utf-8'
            file_path.write_text(fixed, encoding=encoding)
        return True

    return False


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Fix misplaced using statements')
    parser.add_argument('--path', default='src', help='Path to scan')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be fixed')
    args = parser.parse_args()

    root = Path(args.path)
    if not root.exists():
        print(f"Path not found: {root}", file=sys.stderr)
        sys.exit(1)

    fixed_count = 0
    checked_count = 0

    # Find all test .cs files
    for file_path in root.rglob('*.cs'):
        # Skip non-test files
        if '/obj/' in str(file_path) or '/bin/' in str(file_path):
            continue
        if 'node_modules' in str(file_path):
            continue
        if 'Test' not in str(file_path):
            continue

        checked_count += 1
        if fix_file(file_path, dry_run=args.dry_run):
            print(f"{'Would fix' if args.dry_run else 'Fixed'}: {file_path}")
            fixed_count += 1

    print(f"\nChecked: {checked_count} files")
    print(f"Fixed: {fixed_count} files")

    if args.dry_run:
        print("\n(Dry run - no files were modified)")


if __name__ == '__main__':
    main()
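The breakage this repairs is a file-scope using directive dropped directly after a 'using var' declaration inside a method body. A tiny reproduction of the regex behaviour; the C# fragment is illustrative, not taken from the repository:

# Sketch: demonstrate broken_pattern.sub on a minimal illustrative snippet.
import re

broken_pattern = re.compile(
    r'(using var [^;]+;\s*\n)(using StellaOps\.TestKit;\s*\n)',
    re.MULTILINE
)
snippet = (
    "using var scope = factory.CreateScope();\n"   # hypothetical test code
    "using StellaOps.TestKit;\n"                    # the misplaced directive
    "var sut = scope.Resolve();\n"
)
print(broken_pattern.sub(r'\1', snippet))
# The misplaced directive is removed; fix_file() then re-adds it at file scope.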
82
devops/scripts/fix-missing-testkit-using.py
Normal file
@@ -0,0 +1,82 @@
#!/usr/bin/env python3
"""
Adds 'using StellaOps.TestKit;' to files that use TestCategories but are missing the import.
"""

import re
import sys
from pathlib import Path


def fix_file(file_path: Path, dry_run: bool = False) -> bool:
    """Add using StellaOps.TestKit; to files that need it."""
    try:
        content = file_path.read_text(encoding='utf-8-sig')
    except Exception as e:
        print(f"  Error reading {file_path}: {e}", file=sys.stderr)
        return False

    # Check if file uses TestCategories
    if 'TestCategories.' not in content:
        return False

    # Check if 'using StellaOps.TestKit;' exists anywhere in the file
    if 'using StellaOps.TestKit;' in content:
        return False

    # Find the namespace declaration
    namespace_match = re.search(r'^namespace\s+[\w.]+', content, re.MULTILINE)
    if not namespace_match:
        print(f"  No namespace found in {file_path}", file=sys.stderr)
        return False

    # Find the last 'using' statement before the namespace
    top_section = content[:namespace_match.start()]
    last_using = None
    for match in re.finditer(r'^using [^;]+;\s*$', top_section, re.MULTILINE):
        last_using = match

    if last_using:
        insert_pos = last_using.end()
        fixed = content[:insert_pos] + '\nusing StellaOps.TestKit;' + content[insert_pos:]
    else:
        # No using statements, add at the beginning
        fixed = 'using StellaOps.TestKit;\n' + content

    if not dry_run:
        encoding = 'utf-8-sig' if content.startswith('\ufeff') else 'utf-8'
        file_path.write_text(fixed, encoding=encoding)

    return True


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Add missing using StellaOps.TestKit statements')
    parser.add_argument('--path', default='src', help='Path to scan')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be fixed')
    args = parser.parse_args()

    root = Path(args.path)
    fixed_count = 0
    checked_count = 0

    for file_path in root.rglob('*.cs'):
        if '/obj/' in str(file_path) or '/bin/' in str(file_path):
            continue
        if 'node_modules' in str(file_path):
            continue
        if 'Test' not in str(file_path):
            continue

        checked_count += 1
        if fix_file(file_path, dry_run=args.dry_run):
            print(f"{'Would add' if args.dry_run else 'Added'} using to: {file_path}")
            fixed_count += 1

    print(f"\nChecked: {checked_count} files")
    print(f"Fixed: {fixed_count} files")


if __name__ == '__main__':
    main()
57
devops/scripts/fix-testkit-newline.py
Normal file
@@ -0,0 +1,57 @@
#!/usr/bin/env python3
"""
Fixes missing newline between 'using StellaOps.TestKit;' and 'namespace'.
"""

import re
import sys
from pathlib import Path


def fix_file(file_path: Path, dry_run: bool = False) -> bool:
    """Add newline between using StellaOps.TestKit; and namespace."""
    try:
        content = file_path.read_text(encoding='utf-8-sig')
    except Exception as e:
        print(f"  Error reading {file_path}: {e}", file=sys.stderr)
        return False

    # Pattern: using StellaOps.TestKit;namespace
    if 'TestKit;namespace' not in content:
        return False

    # Fix: Add newline between them
    fixed = content.replace('TestKit;namespace', 'TestKit;\nnamespace')

    if not dry_run:
        encoding = 'utf-8-sig' if content.startswith('\ufeff') else 'utf-8'
        file_path.write_text(fixed, encoding=encoding)

    return True


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Fix missing newline between using and namespace')
    parser.add_argument('--path', default='src', help='Path to scan')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be fixed')
    args = parser.parse_args()

    root = Path(args.path)
    fixed_count = 0

    for file_path in root.rglob('*.cs'):
        if '/obj/' in str(file_path) or '/bin/' in str(file_path):
            continue
        if 'node_modules' in str(file_path):
            continue

        if fix_file(file_path, dry_run=args.dry_run):
            print(f"{'Would fix' if args.dry_run else 'Fixed'}: {file_path}")
            fixed_count += 1

    print(f"\nFixed: {fixed_count} files")


if __name__ == '__main__':
    main()
221
devops/scripts/init-config.sh
Normal file
@@ -0,0 +1,221 @@
#!/usr/bin/env bash
#
# Initialize StellaOps configuration from sample files
#
# Usage:
#   ./devops/scripts/init-config.sh [profile]
#
# Profiles:
#   dev    - Development environment (default)
#   stage  - Staging environment
#   prod   - Production environment
#   airgap - Air-gapped deployment
#

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"
ETC_DIR="${ROOT_DIR}/etc"

PROFILE="${1:-dev}"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

log_info()  { echo -e "${BLUE}[INFO]${NC} $*"; }
log_ok()    { echo -e "${GREEN}[OK]${NC} $*"; }
log_warn()  { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }

# Validate profile
case "${PROFILE}" in
  dev|stage|prod|airgap)
    log_info "Initializing configuration for profile: ${PROFILE}"
    ;;
  *)
    log_error "Unknown profile: ${PROFILE}"
    echo "Valid profiles: dev, stage, prod, airgap"
    exit 1
    ;;
esac

# Create directory structure
create_directories() {
  log_info "Creating directory structure..."

  local dirs=(
    "etc/authority/plugins"
    "etc/certificates/trust-roots"
    "etc/certificates/signing"
    "etc/concelier/sources"
    "etc/crypto/profiles/cn"
    "etc/crypto/profiles/eu"
    "etc/crypto/profiles/kr"
    "etc/crypto/profiles/ru"
    "etc/crypto/profiles/us-fips"
    "etc/env"
    "etc/llm-providers"
    "etc/notify/templates"
    "etc/plugins/notify"
    "etc/plugins/scanner/lang"
    "etc/plugins/scanner/os"
    "etc/policy/packs"
    "etc/policy/schemas"
    "etc/router"
    "etc/scanner"
    "etc/scheduler"
    "etc/scm-connectors"
    "etc/secrets"
    "etc/signals"
    "etc/vex"
  )

  for dir in "${dirs[@]}"; do
    mkdir -p "${ROOT_DIR}/${dir}"
  done

  log_ok "Directory structure created"
}

# Copy sample files to active configs
copy_sample_files() {
  log_info "Copying sample files..."

  local count=0

  # Find all .sample files
  while IFS= read -r -d '' sample_file; do
    # Determine target file (remove .sample extension)
    local target_file="${sample_file%.sample}"

    # Skip if target already exists
    if [[ -f "${target_file}" ]]; then
      log_warn "Skipping (exists): ${target_file#${ROOT_DIR}/}"
      continue
    fi

    cp "${sample_file}" "${target_file}"
    log_ok "Created: ${target_file#${ROOT_DIR}/}"
    count=$((count + 1))  # plain arithmetic assignment stays safe under set -e
  done < <(find "${ETC_DIR}" -name "*.sample" -type f -print0 2>/dev/null)

  log_info "Copied ${count} sample files"
}

# Copy environment-specific profile
copy_env_profile() {
  log_info "Setting up environment profile: ${PROFILE}"

  local env_sample="${ETC_DIR}/env/${PROFILE}.env.sample"
  local env_target="${ROOT_DIR}/.env"

  if [[ -f "${env_sample}" ]]; then
    if [[ -f "${env_target}" ]]; then
      log_warn ".env already exists, not overwriting"
    else
      cp "${env_sample}" "${env_target}"
      log_ok "Created .env from ${PROFILE} profile"
    fi
  else
    log_warn "No environment sample found for profile: ${PROFILE}"
  fi
}

# Create .gitignore entries for active configs
update_gitignore() {
  log_info "Updating .gitignore..."

  local gitignore="${ROOT_DIR}/.gitignore"
  local entries=(
    "# Active configuration files (not samples)"
    "etc/**/*.yaml"
    "!etc/**/*.yaml.sample"
    "etc/**/*.json"
    "!etc/**/*.json.sample"
    "etc/**/env"
    "!etc/**/env.sample"
    "etc/secrets/*"
    "!etc/secrets/*.sample"
    "!etc/secrets/README.md"
  )

  # Check if entries already exist
  if grep -q "# Active configuration files" "${gitignore}" 2>/dev/null; then
    log_warn ".gitignore already contains config entries"
    return
  fi

  echo "" >> "${gitignore}"
  for entry in "${entries[@]}"; do
    echo "${entry}" >> "${gitignore}"
  done

  log_ok "Updated .gitignore"
}

# Validate the configuration
validate_config() {
  log_info "Validating configuration..."

  local errors=0

  # Check for required directories
  local required_dirs=(
    "etc/scanner"
    "etc/authority"
    "etc/policy"
  )

  for dir in "${required_dirs[@]}"; do
    if [[ ! -d "${ROOT_DIR}/${dir}" ]]; then
      log_error "Missing required directory: ${dir}"
      errors=$((errors + 1))
    fi
  done

  if [[ ${errors} -gt 0 ]]; then
    log_error "Validation failed with ${errors} errors"
    exit 1
  fi

  log_ok "Configuration validated"
}

# Print summary
print_summary() {
  echo ""
  echo "========================================"
  echo "  Configuration Initialized"
  echo "========================================"
  echo ""
  echo "Profile: ${PROFILE}"
  echo ""
  echo "Next steps:"
  echo "  1. Review and customize configurations in etc/"
  echo "  2. Set sensitive values via environment variables"
  echo "  3. For crypto compliance, set STELLAOPS_CRYPTO_PROFILE"
  echo ""
  echo "Quick start:"
  echo "  docker compose up -d"
  echo ""
  echo "Documentation:"
  echo "  docs/operations/configuration-guide.md"
  echo ""
}

# Main
main() {
  create_directories
  copy_sample_files
  copy_env_profile
  update_gitignore
  validate_config
  print_summary
}

main "$@"
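The activation rule is simply "copy <name>.sample to <name> under etc/, unless the target already exists". The same rule expressed as a read-only preview, useful before running the script; a sketch, not part of init-config.sh:

# Sketch: list which sample files init-config.sh would activate, without copying.
from pathlib import Path

for sample in Path("etc").rglob("*.sample"):
    target = sample.with_name(sample.name[: -len(".sample")])
    state = "skip (exists)" if target.exists() else "would create"
    print(f"{state}: {target}")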
330
devops/scripts/migrate-config.sh
Normal file
@@ -0,0 +1,330 @@
#!/usr/bin/env bash
#
# Migrate legacy configuration structure to consolidated etc/
#
# This script migrates:
#   - certificates/  -> etc/certificates/
#   - config/        -> etc/crypto/ and etc/env/
#   - policies/      -> etc/policy/
#   - etc/rootpack/  -> etc/crypto/profiles/
#
# Usage:
#   ./devops/scripts/migrate-config.sh [--dry-run]
#

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"

DRY_RUN=false
[[ "${1:-}" == "--dry-run" ]] && DRY_RUN=true

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

log_info()  { echo -e "${BLUE}[INFO]${NC} $*"; }
log_ok()    { echo -e "${GREEN}[OK]${NC} $*"; }
log_warn()  { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
log_dry()   { echo -e "${YELLOW}[DRY-RUN]${NC} $*"; }

# Execute or log command
run_cmd() {
  if [[ "${DRY_RUN}" == true ]]; then
    log_dry "$*"
  else
    "$@"
  fi
}

# Create backup
create_backup() {
  local backup_file="${ROOT_DIR}/config-backup-$(date +%Y%m%d-%H%M%S).tar.gz"

  log_info "Creating backup: ${backup_file}"

  if [[ "${DRY_RUN}" == true ]]; then
    log_dry "Would create backup of: certificates/ config/ policies/ etc/"
    return
  fi

  local dirs_to_backup=()
  [[ -d "${ROOT_DIR}/certificates" ]] && dirs_to_backup+=("certificates")
  [[ -d "${ROOT_DIR}/config" ]] && dirs_to_backup+=("config")
  [[ -d "${ROOT_DIR}/policies" ]] && dirs_to_backup+=("policies")
  [[ -d "${ROOT_DIR}/etc" ]] && dirs_to_backup+=("etc")

  if [[ ${#dirs_to_backup[@]} -gt 0 ]]; then
    cd "${ROOT_DIR}"
    tar -czvf "${backup_file}" "${dirs_to_backup[@]}"
    log_ok "Backup created: ${backup_file}"
  else
    log_warn "No directories to backup"
  fi
}

# Create new directory structure
create_directories() {
  log_info "Creating new directory structure..."

  local dirs=(
    "etc/certificates/trust-roots"
    "etc/certificates/signing"
    "etc/crypto/profiles/cn"
    "etc/crypto/profiles/eu"
    "etc/crypto/profiles/kr"
    "etc/crypto/profiles/ru"
    "etc/crypto/profiles/us-fips"
    "etc/env"
    "etc/policy/packs"
    "etc/policy/schemas"
  )

  for dir in "${dirs[@]}"; do
    run_cmd mkdir -p "${ROOT_DIR}/${dir}"
  done

  log_ok "Directory structure created"
}

# Migrate certificates/
migrate_certificates() {
  local src_dir="${ROOT_DIR}/certificates"

  if [[ ! -d "${src_dir}" ]]; then
    log_info "No certificates/ directory found, skipping"
    return
  fi

  log_info "Migrating certificates/..."

  # Trust roots (CA bundles); unmatched globs are skipped by the -f guard
  for f in "${src_dir}"/*-bundle*.pem "${src_dir}"/*-root*.pem "${src_dir}"/*_bundle*.pem "${src_dir}"/*_root*.pem; do
    [[ -f "$f" ]] || continue
    run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/trust-roots/"
    log_ok "Moved: $(basename "$f") -> etc/certificates/trust-roots/"
  done

  # Signing keys
  for f in "${src_dir}"/*-signing-*.pem "${src_dir}"/*_signing_*.pem; do
    [[ -f "$f" ]] || continue
    run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/signing/"
    log_ok "Moved: $(basename "$f") -> etc/certificates/signing/"
  done

  # Move remaining .pem and .cer files to trust-roots
  for f in "${src_dir}"/*.pem "${src_dir}"/*.cer; do
    [[ -f "$f" ]] || continue
    run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/trust-roots/"
    log_ok "Moved: $(basename "$f") -> etc/certificates/trust-roots/"
  done

  # Remove empty directory
  if [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}")" ]]; then
    run_cmd rmdir "${src_dir}"
    log_ok "Removed empty: certificates/"
  fi
}

# Migrate config/
migrate_config_dir() {
  local src_dir="${ROOT_DIR}/config"

  if [[ ! -d "${src_dir}" ]]; then
    log_info "No config/ directory found, skipping"
    return
  fi

  log_info "Migrating config/..."

  # Map env files to crypto profiles
  declare -A env_mapping=(
    [".env.fips.example"]="us-fips/env.sample"
    [".env.eidas.example"]="eu/env.sample"
    [".env.ru-free.example"]="ru/env.sample"
    [".env.ru-paid.example"]="ru/env-paid.sample"
    [".env.sm.example"]="cn/env.sample"
    [".env.kcmvp.example"]="kr/env.sample"
  )

  for src_name in "${!env_mapping[@]}"; do
    local src_file="${src_dir}/env/${src_name}"
    local dst_file="${ROOT_DIR}/etc/crypto/profiles/${env_mapping[$src_name]}"

    if [[ -f "${src_file}" ]]; then
      run_cmd mkdir -p "$(dirname "${dst_file}")"
      run_cmd mv "${src_file}" "${dst_file}"
      log_ok "Moved: ${src_name} -> etc/crypto/profiles/${env_mapping[$src_name]}"
    fi
  done

  # Remove crypto-profiles.sample.json (superseded)
  if [[ -f "${src_dir}/crypto-profiles.sample.json" ]]; then
    run_cmd rm "${src_dir}/crypto-profiles.sample.json"
    log_ok "Removed: config/crypto-profiles.sample.json (superseded by etc/crypto/)"
  fi

  # Remove empty directories
  [[ -d "${src_dir}/env" ]] && [[ -z "$(ls -A "${src_dir}/env" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}/env"
  [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
}

# Migrate policies/
migrate_policies() {
  local src_dir="${ROOT_DIR}/policies"

  if [[ ! -d "${src_dir}" ]]; then
    log_info "No policies/ directory found, skipping"
    return
  fi

  log_info "Migrating policies/..."

  # Move policy packs
  for f in "${src_dir}"/*.yaml; do
    [[ -f "$f" ]] || continue
    run_cmd mv "$f" "${ROOT_DIR}/etc/policy/packs/"
    log_ok "Moved: $(basename "$f") -> etc/policy/packs/"
  done

  # Move schemas
  if [[ -d "${src_dir}/schemas" ]]; then
    for f in "${src_dir}/schemas"/*.json; do
      [[ -f "$f" ]] || continue
      run_cmd mv "$f" "${ROOT_DIR}/etc/policy/schemas/"
      log_ok "Moved: schemas/$(basename "$f") -> etc/policy/schemas/"
    done
    [[ -z "$(ls -A "${src_dir}/schemas" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}/schemas"
  fi

  # Move AGENTS.md if present
  [[ -f "${src_dir}/AGENTS.md" ]] && run_cmd mv "${src_dir}/AGENTS.md" "${ROOT_DIR}/etc/policy/"

  # Remove empty directory
  [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
}

# Migrate etc/rootpack/ to etc/crypto/profiles/
migrate_rootpack() {
  local src_dir="${ROOT_DIR}/etc/rootpack"

  if [[ ! -d "${src_dir}" ]]; then
    log_info "No etc/rootpack/ directory found, skipping"
    return
  fi

  log_info "Migrating etc/rootpack/ to etc/crypto/profiles/..."

  for region_dir in "${src_dir}"/*; do
    [[ -d "${region_dir}" ]] || continue
    local region_name=$(basename "${region_dir}")
    local target_dir="${ROOT_DIR}/etc/crypto/profiles/${region_name}"

    run_cmd mkdir -p "${target_dir}"

    for f in "${region_dir}"/*; do
      [[ -f "$f" ]] || continue
      run_cmd mv "$f" "${target_dir}/"
      log_ok "Moved: rootpack/${region_name}/$(basename "$f") -> etc/crypto/profiles/${region_name}/"
    done

    [[ -z "$(ls -A "${region_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${region_dir}"
  done

  [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
}

# Validate migration
validate_migration() {
  log_info "Validating migration..."

  local errors=0

  # Check new structure exists
  local required=(
    "etc/certificates"
    "etc/crypto/profiles"
    "etc/policy"
  )

  for dir in "${required[@]}"; do
    if [[ ! -d "${ROOT_DIR}/${dir}" ]]; then
      log_error "Missing: ${dir}"
      errors=$((errors + 1))
    fi
  done

  # Check legacy directories are gone
  local legacy=(
    "certificates"
    "config"
    "policies"
    "etc/rootpack"
  )

  for dir in "${legacy[@]}"; do
    if [[ -d "${ROOT_DIR}/${dir}" ]] && [[ -n "$(ls -A "${ROOT_DIR}/${dir}" 2>/dev/null)" ]]; then
      log_warn "Legacy directory still has content: ${dir}"
    fi
  done

  if [[ ${errors} -gt 0 ]]; then
    log_error "Validation failed"
    return 1
  fi

  log_ok "Migration validated"
}

# Print summary
print_summary() {
  echo ""
  echo "========================================"
  if [[ "${DRY_RUN}" == true ]]; then
    echo "  Migration Dry Run Complete"
  else
    echo "  Migration Complete"
  fi
  echo "========================================"
  echo ""
  echo "New structure:"
  echo "  etc/certificates/     - Trust anchors and signing keys"
  echo "  etc/crypto/profiles/  - Regional crypto profiles"
  echo "  etc/policy/           - Policy engine configuration"
  echo ""
  if [[ "${DRY_RUN}" == true ]]; then
    echo "Run without --dry-run to apply changes"
  else
    echo "Next steps:"
    echo "  1. Update Docker Compose volume mounts"
    echo "  2. Update any hardcoded paths in scripts"
    echo "  3. Restart services and validate"
    echo ""
    echo "Rollback:"
    echo "  tar -xzvf config-backup-*.tar.gz"
  fi
  echo ""
}

# Main
main() {
  if [[ "${DRY_RUN}" == true ]]; then
    log_info "DRY RUN - no changes will be made"
  fi

  create_backup
  create_directories
  migrate_certificates
  migrate_config_dir
  migrate_policies
  migrate_rootpack
  validate_migration
  print_summary
}

main "$@"
343
devops/scripts/validate-test-traits.py
Normal file
@@ -0,0 +1,343 @@
#!/usr/bin/env python3
"""
Validate and report on test Category traits across the codebase.

Sprint: SPRINT_20251226_007_CICD

This script scans all test files in the codebase and reports:
1. Test files with Category traits
2. Test files missing Category traits
3. Coverage percentage by module

Usage:
    python devops/scripts/validate-test-traits.py [--fix] [--module <name>]

Options:
    --fix       Attempt to add default Unit trait to tests without categories
    --module    Only process tests in the specified module
    --verbose   Show detailed output
    --json      Output as JSON for CI consumption
"""

import os
import re
import sys
import json
import argparse
from pathlib import Path
from dataclasses import dataclass, field
from typing import List, Dict, Set, Optional


VALID_CATEGORIES = {
    "Unit",
    "Integration",
    "Architecture",
    "Contract",
    "Security",
    "Golden",
    "Performance",
    "Benchmark",
    "AirGap",
    "Chaos",
    "Determinism",
    "Resilience",
    "Observability",
    "Property",
    "Snapshot",
    "Live",
}

# Patterns to identify test methods and classes
FACT_PATTERN = re.compile(r'\[Fact[^\]]*\]')
THEORY_PATTERN = re.compile(r'\[Theory[^\]]*\]')
# Match both string literals and TestCategories.Xxx constants
# Also match inline format like [Fact, Trait("Category", ...)]
TRAIT_CATEGORY_PATTERN = re.compile(
    r'Trait\s*\(\s*["\']Category["\']\s*,\s*(?:["\'](\w+)["\']|TestCategories\.(\w+))\s*\)'
)
TEST_CLASS_PATTERN = re.compile(r'public\s+(?:sealed\s+)?class\s+\w+.*Tests?\b')


@dataclass
class TestFileAnalysis:
    path: str
    has_facts: bool = False
    has_theories: bool = False
    has_category_traits: bool = False
    categories_found: Set[str] = field(default_factory=set)
    test_method_count: int = 0
    categorized_test_count: int = 0


def analyze_test_file(file_path: Path) -> TestFileAnalysis:
    """Analyze a single test file for Category traits."""
    analysis = TestFileAnalysis(path=str(file_path))

    try:
        content = file_path.read_text(encoding='utf-8', errors='ignore')
    except Exception as e:
        print(f"Warning: Could not read {file_path}: {e}", file=sys.stderr)
        return analysis

    # Check for test methods
    facts = FACT_PATTERN.findall(content)
    theories = THEORY_PATTERN.findall(content)

    analysis.has_facts = len(facts) > 0
    analysis.has_theories = len(theories) > 0
    analysis.test_method_count = len(facts) + len(theories)

    # Check for Category traits
    category_matches = TRAIT_CATEGORY_PATTERN.findall(content)
    if category_matches:
        analysis.has_category_traits = True
        # Pattern has two capture groups - one for string literal, one for constant
        # Extract non-empty values from tuples
        categories = set()
        for match in category_matches:
            cat = match[0] or match[1]  # First non-empty group
            if cat:
                categories.add(cat)
        analysis.categories_found = categories
        analysis.categorized_test_count = len(category_matches)

    return analysis


def get_module_from_path(file_path: Path) -> str:
    """Extract module name from file path."""
    parts = file_path.parts

    # Look for src/<Module> pattern
    for i, part in enumerate(parts):
        if part == 'src' and i + 1 < len(parts):
            next_part = parts[i + 1]
            if next_part.startswith('__'):
                return next_part  # e.g., __Tests, __Libraries
            return next_part

    return "Unknown"


def find_test_files(root_path: Path, module_filter: Optional[str] = None) -> List[Path]:
    """Find all test files in the codebase."""
    test_files = []

    for pattern in ['**/*.Tests.cs', '**/*Test.cs', '**/*Tests/*.cs']:
        for file_path in root_path.glob(pattern):
            # Skip generated files
            if '/obj/' in str(file_path) or '/bin/' in str(file_path):
                continue
            if 'node_modules' in str(file_path):
                continue

            # Apply module filter if specified
            if module_filter:
                module = get_module_from_path(file_path)
                if module.lower() != module_filter.lower():
                    continue

            test_files.append(file_path)

    return sorted(set(test_files))  # the glob patterns overlap, so de-duplicate


def generate_report(analyses: List[TestFileAnalysis], verbose: bool = False) -> Dict:
    """Generate a summary report from analyses."""
    total_files = len(analyses)
    files_with_tests = [a for a in analyses if a.has_facts or a.has_theories]
    files_with_traits = [a for a in analyses if a.has_category_traits]
    files_missing_traits = [a for a in files_with_tests if not a.has_category_traits]

    # Group by module
    by_module: Dict[str, Dict] = {}
    for analysis in analyses:
        module = get_module_from_path(Path(analysis.path))
        if module not in by_module:
            by_module[module] = {
                'total': 0,
                'with_tests': 0,
                'with_traits': 0,
                'missing_traits': 0,
                'files_missing': []
            }

        by_module[module]['total'] += 1
        if analysis.has_facts or analysis.has_theories:
            by_module[module]['with_tests'] += 1
        if analysis.has_category_traits:
            by_module[module]['with_traits'] += 1
        else:
            if analysis.has_facts or analysis.has_theories:
                by_module[module]['missing_traits'] += 1
                if verbose:
                    by_module[module]['files_missing'].append(analysis.path)

    # Calculate coverage
    coverage = (len(files_with_traits) / len(files_with_tests) * 100) if files_with_tests else 0

    # Collect all categories found
    all_categories: Set[str] = set()
    for analysis in analyses:
        all_categories.update(analysis.categories_found)

    return {
        'summary': {
            'total_test_files': total_files,
            'files_with_tests': len(files_with_tests),
            'files_with_category_traits': len(files_with_traits),
            'files_missing_traits': len(files_missing_traits),
            'coverage_percent': round(coverage, 1),
            'categories_used': sorted(all_categories),
            'valid_categories': sorted(VALID_CATEGORIES),
        },
        'by_module': by_module,
        'files_missing_traits': [a.path for a in files_missing_traits] if verbose else []
    }


def add_default_trait(file_path: Path, default_category: str = "Unit") -> bool:
    """Add default Category trait to test methods missing traits."""
    try:
        content = file_path.read_text(encoding='utf-8')
        original = content

        # Pattern to find [Fact] or [Theory] not preceded by Category trait
        # This is a simplified approach - adds trait after [Fact] or [Theory]

        # Check if file already has Category traits
        if TRAIT_CATEGORY_PATTERN.search(content):
            return False  # Already has some traits, skip

        # Add using statement if not present
        if 'using StellaOps.TestKit;' not in content:
            # Find last using statement and add after it
            using_pattern = re.compile(r'(using [^;]+;\s*\n)(?!using)')
            match = list(using_pattern.finditer(content))
            if match:
                last_using = match[-1]
                insert_pos = last_using.end()
                content = content[:insert_pos] + 'using StellaOps.TestKit;\n' + content[insert_pos:]

        # Add Trait to [Fact] attributes
        content = re.sub(
            r'(\[Fact\])',
            f'[Trait("Category", TestCategories.{default_category})]\n    \\1',
            content
        )

        # Add Trait to [Theory] attributes
        content = re.sub(
            r'(\[Theory\])',
            f'[Trait("Category", TestCategories.{default_category})]\n    \\1',
            content
        )

        if content != original:
            file_path.write_text(content, encoding='utf-8')
            return True

        return False
    except Exception as e:
        print(f"Error processing {file_path}: {e}", file=sys.stderr)
        return False


def main():
    parser = argparse.ArgumentParser(description='Validate test Category traits')
    parser.add_argument('--fix', action='store_true', help='Add default Unit trait to tests without categories')
    parser.add_argument('--module', type=str, help='Only process tests in the specified module')
    parser.add_argument('--verbose', '-v', action='store_true', help='Show detailed output')
    parser.add_argument('--json', action='store_true', help='Output as JSON')
    parser.add_argument('--category', type=str, default='Unit', help='Default category for --fix (default: Unit)')

    args = parser.parse_args()

    # Find repository root
    script_path = Path(__file__).resolve()
    repo_root = script_path.parent.parent.parent
    src_path = repo_root / 'src'

    if not src_path.exists():
        print(f"Error: src directory not found at {src_path}", file=sys.stderr)
        sys.exit(1)

    # Find all test files
    test_files = find_test_files(src_path, args.module)

    if not args.json:
        print(f"Found {len(test_files)} test files to analyze...")

    # Analyze each file
    analyses = [analyze_test_file(f) for f in test_files]

    # Generate report
    report = generate_report(analyses, args.verbose)

    if args.json:
        print(json.dumps(report, indent=2))
    else:
        # Print summary
        summary = report['summary']
        print("\n" + "=" * 60)
        print("TEST CATEGORY TRAIT COVERAGE REPORT")
        print("=" * 60)
        print(f"Total test files: {summary['total_test_files']}")
        print(f"Files with test methods: {summary['files_with_tests']}")
        print(f"Files with Category trait: {summary['files_with_category_traits']}")
        print(f"Files missing traits: {summary['files_missing_traits']}")
        print(f"Coverage: {summary['coverage_percent']}%")
        print(f"\nCategories in use: {', '.join(summary['categories_used']) or 'None'}")
        print(f"Valid categories: {', '.join(summary['valid_categories'])}")

        # Print by module
        print("\n" + "-" * 60)
        print("BY MODULE")
        print("-" * 60)
        print(f"{'Module':<25} {'With Tests':<12} {'With Traits':<12} {'Missing':<10}")
        print("-" * 60)

        for module, data in sorted(report['by_module'].items()):
            if data['with_tests'] > 0:
                print(f"{module:<25} {data['with_tests']:<12} {data['with_traits']:<12} {data['missing_traits']:<10}")

        # Show files missing traits if verbose
        if args.verbose and report['files_missing_traits']:
            print("\n" + "-" * 60)
            print("FILES MISSING CATEGORY TRAITS")
            print("-" * 60)
            for f in sorted(report['files_missing_traits'])[:50]:  # Limit to first 50
                print(f"  {f}")
            if len(report['files_missing_traits']) > 50:
                print(f"  ... and {len(report['files_missing_traits']) - 50} more")

    # Fix mode
    if args.fix:
        files_to_fix = [Path(a.path) for a in analyses
                        if (a.has_facts or a.has_theories) and not a.has_category_traits]

        if not args.json:
            print(f"\n{'=' * 60}")
            print(f"FIXING {len(files_to_fix)} FILES WITH DEFAULT CATEGORY: {args.category}")
            print("=" * 60)

        fixed_count = 0
        for file_path in files_to_fix:
            if add_default_trait(file_path, args.category):
                fixed_count += 1
                if not args.json:
                    print(f"  Fixed: {file_path}")

        if not args.json:
            print(f"\nFixed {fixed_count} files")

    # Exit with error code if coverage is below threshold
    if report['summary']['coverage_percent'] < 80:
        sys.exit(1)

    sys.exit(0)


if __name__ == '__main__':
    main()
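TRAIT_CATEGORY_PATTERN accepts both the string-literal and the TestCategories-constant spellings, including the inline [Fact, Trait(...)] form. A quick self-contained check of that behaviour; the C# attribute lines are illustrative samples, not repository code:

# Sketch: confirm which attribute spellings TRAIT_CATEGORY_PATTERN recognises.
import re

TRAIT_CATEGORY_PATTERN = re.compile(
    r'Trait\s*\(\s*["\']Category["\']\s*,\s*(?:["\'](\w+)["\']|TestCategories\.(\w+))\s*\)'
)
samples = [
    '[Trait("Category", "Unit")]',
    '[Trait("Category", TestCategories.Integration)]',
    '[Fact, Trait("Category", TestCategories.Security)]',
]
for line in samples:
    m = TRAIT_CATEGORY_PATTERN.search(line)
    cat = (m.group(1) or m.group(2)) if m else None
    print(f"{line} -> {cat}")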