devops folders consolidate
160
deploy/tools/ci/determinism/compare-platform-hashes.py
Normal file
@@ -0,0 +1,160 @@
#!/usr/bin/env python3
"""
Cross-platform hash comparison for determinism verification.
Sprint: SPRINT_20251226_007_BE_determinism_gaps
Task: DET-GAP-13 - Cross-platform hash comparison report generation
"""

import argparse
import json
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Any


def load_hashes(path: str) -> dict[str, str]:
    """Load hash file from path."""
    with open(path) as f:
        data = json.load(f)
    return data.get("hashes", data)


def compare_hashes(
    linux: dict[str, str],
    windows: dict[str, str],
    macos: dict[str, str]
) -> tuple[list[dict], list[str]]:
    """
    Compare hashes across platforms.
    Returns (divergences, matched_keys).
    """
    all_keys = set(linux.keys()) | set(windows.keys()) | set(macos.keys())
    divergences = []
    matched = []

    for key in sorted(all_keys):
        linux_hash = linux.get(key, "MISSING")
        windows_hash = windows.get(key, "MISSING")
        macos_hash = macos.get(key, "MISSING")

        if linux_hash == windows_hash == macos_hash:
            matched.append(key)
        else:
            divergences.append({
                "key": key,
                "linux": linux_hash,
                "windows": windows_hash,
                "macos": macos_hash
            })

    return divergences, matched


def generate_markdown_report(
    divergences: list[dict],
    matched: list[str],
    linux_path: str,
    windows_path: str,
    macos_path: str
) -> str:
    """Generate Markdown report."""
    lines = [
        f"**Generated:** {datetime.now(timezone.utc).isoformat()}",
        "",
        "### Summary",
        "",
        f"- ✅ **Matched:** {len(matched)} hashes",
        f"- {'❌' if divergences else '✅'} **Divergences:** {len(divergences)} hashes",
        "",
    ]

    if divergences:
        lines.extend([
            "### Divergences",
            "",
            "| Key | Linux | Windows | macOS |",
            "|-----|-------|---------|-------|",
        ])
        for d in divergences:
            linux_short = d["linux"][:16] + "..." if len(d["linux"]) > 16 else d["linux"]
            windows_short = d["windows"][:16] + "..." if len(d["windows"]) > 16 else d["windows"]
            macos_short = d["macos"][:16] + "..." if len(d["macos"]) > 16 else d["macos"]
            lines.append(f"| `{d['key']}` | `{linux_short}` | `{windows_short}` | `{macos_short}` |")
        lines.append("")

    lines.extend([
        "### Matched Hashes",
        "",
        f"<details><summary>Show {len(matched)} matched hashes</summary>",
        "",
    ])
    for key in matched[:50]:  # Limit display
        lines.append(f"- `{key}`")
    if len(matched) > 50:
        lines.append(f"- ... and {len(matched) - 50} more")
    lines.extend(["", "</details>", ""])

    return "\n".join(lines)


def main():
    parser = argparse.ArgumentParser(description="Compare determinism hashes across platforms")
    parser.add_argument("--linux", required=True, help="Path to Linux hashes JSON")
    parser.add_argument("--windows", required=True, help="Path to Windows hashes JSON")
    parser.add_argument("--macos", required=True, help="Path to macOS hashes JSON")
    parser.add_argument("--output", required=True, help="Output JSON report path")
    parser.add_argument("--markdown", required=True, help="Output Markdown report path")
    args = parser.parse_args()

    # Load hashes
    linux_hashes = load_hashes(args.linux)
    windows_hashes = load_hashes(args.windows)
    macos_hashes = load_hashes(args.macos)

    # Compare
    divergences, matched = compare_hashes(linux_hashes, windows_hashes, macos_hashes)

    # Generate reports
    report = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "sources": {
            "linux": args.linux,
            "windows": args.windows,
            "macos": args.macos
        },
        "summary": {
            "matched": len(matched),
            "divergences": len(divergences),
            "total": len(matched) + len(divergences)
        },
        "divergences": divergences,
        "matched": matched
    }

    # Write JSON report
    Path(args.output).parent.mkdir(parents=True, exist_ok=True)
    with open(args.output, "w") as f:
        json.dump(report, f, indent=2)

    # Write Markdown report
    markdown = generate_markdown_report(
        divergences, matched,
        args.linux, args.windows, args.macos
    )
    with open(args.markdown, "w") as f:
        f.write(markdown)

    # Print summary
    print(f"Comparison complete:")
    print(f" Matched: {len(matched)}")
    print(f" Divergences: {len(divergences)}")

    # Exit with error if divergences found
    if divergences:
        print("\nERROR: Hash divergences detected!")
        sys.exit(1)


if __name__ == "__main__":
    main()
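A minimal sketch of how this comparison step might be wired into a CI determinism job; the artifact paths below are illustrative assumptions, not defined by this commit.

```bash
# Hypothetical CI step; hash-file locations are assumptions for illustration only
python deploy/tools/ci/determinism/compare-platform-hashes.py \
  --linux artifacts/linux/hashes.json \
  --windows artifacts/windows/hashes.json \
  --macos artifacts/macos/hashes.json \
  --output out/determinism/comparison.json \
  --markdown out/determinism/comparison.md
# Non-zero exit signals divergences, so the job fails when platforms disagree.
```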
@@ -0,0 +1 @@
global using Xunit;
@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <IsTestProject>true</IsTestProject>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit" />
    <PackageReference Include="xunit.runner.visualstudio" />
  </ItemGroup>
</Project>
@@ -0,0 +1,48 @@
using System.Xml.Linq;
using FluentAssertions;

namespace NugetPrime.Tests;

public sealed class NugetPrimeTests
{
    [Theory]
    [InlineData("nuget-prime.csproj")]
    [InlineData("nuget-prime-v9.csproj")]
    public void PackageDownloads_ArePinned(string projectFile)
    {
        var repoRoot = FindRepoRoot();
        var path = Path.Combine(repoRoot, "devops", "tools", "nuget-prime", projectFile);
        File.Exists(path).Should().BeTrue($"expected {projectFile} under devops/tools/nuget-prime");

        var doc = XDocument.Load(path);
        var packages = doc.Descendants().Where(element => element.Name.LocalName == "PackageDownload").ToList();
        packages.Should().NotBeEmpty();

        foreach (var package in packages)
        {
            var include = package.Attribute("Include")?.Value;
            include.Should().NotBeNullOrWhiteSpace();

            var version = package.Attribute("Version")?.Value;
            version.Should().NotBeNullOrWhiteSpace();
            version.Should().NotContain("*");
        }
    }

    private static string FindRepoRoot()
    {
        var current = new DirectoryInfo(AppContext.BaseDirectory);
        for (var i = 0; i < 12 && current is not null; i++)
        {
            var candidate = Path.Combine(current.FullName, "devops", "tools", "nuget-prime", "nuget-prime.csproj");
            if (File.Exists(candidate))
            {
                return current.FullName;
            }

            current = current.Parent;
        }

        throw new DirectoryNotFoundException("Repo root not found for devops/tools/nuget-prime");
    }
}
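A sketch of running these pinning tests locally; the test project path is a placeholder, since the project file location is not shown in this hunk.

```bash
# Hypothetical invocation; replace the path with the actual test project location
dotnet test path/to/NugetPrime.Tests.csproj --filter "FullyQualifiedName~NugetPrimeTests"
```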
30
deploy/tools/ci/nuget-prime/mirror-packages.txt
Normal file
@@ -0,0 +1,30 @@
AWSSDK.S3|3.7.305.6
CycloneDX.Core|10.0.1
Google.Protobuf|3.27.2
Grpc.Net.Client|2.65.0
Grpc.Tools|2.65.0
Microsoft.Data.Sqlite|9.0.0-rc.1.24451.1
Microsoft.Extensions.Configuration.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Configuration.Abstractions|9.0.0
Microsoft.Extensions.Configuration.Binder|10.0.0-rc.2.25502.107
Microsoft.Extensions.DependencyInjection.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.DependencyInjection.Abstractions|9.0.0
Microsoft.Extensions.Diagnostics.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Diagnostics.HealthChecks|10.0.0-rc.2.25502.107
Microsoft.Extensions.Hosting.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Http.Polly|10.0.0-rc.2.25502.107
Microsoft.Extensions.Http|10.0.0-rc.2.25502.107
Microsoft.Extensions.Logging.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Logging.Abstractions|9.0.0
Microsoft.Extensions.Options.ConfigurationExtensions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Options|10.0.0-rc.2.25502.107
Microsoft.Extensions.Options|9.0.0
Npgsql|9.0.3
Npgsql.EntityFrameworkCore.PostgreSQL|9.0.3
RoaringBitmap|0.0.9
Serilog.AspNetCore|8.0.1
Serilog.Extensions.Hosting|8.0.0
Serilog.Sinks.Console|5.0.1
StackExchange.Redis|2.7.33
System.Text.Json|10.0.0-preview.7.25380.108
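For context, the `name|version` format above could be consumed by a simple shell loop like the sketch below; the consuming mirror script is an assumption and is not part of this diff.

```bash
# Hypothetical reader for mirror-packages.txt (illustrative only)
while IFS='|' read -r package version; do
  [[ -z "${package}" ]] && continue
  echo "mirror ${package} ${version}"   # actual download/push tooling would go here
done < deploy/tools/ci/nuget-prime/mirror-packages.txt
```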
14
deploy/tools/ci/nuget-prime/nuget-prime-v9.csproj
Normal file
@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <RestorePackagesPath>../../.nuget/packages</RestorePackagesPath>
    <DisableImplicitFrameworkReferences>true</DisableImplicitFrameworkReferences>
    <EnableDefaultItems>false</EnableDefaultItems>
  </PropertyGroup>
  <ItemGroup>
    <PackageDownload Include="Microsoft.Extensions.Configuration.Abstractions" Version="[9.0.0]" />
    <PackageDownload Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="[9.0.0]" />
    <PackageDownload Include="Microsoft.Extensions.Logging.Abstractions" Version="[9.0.0]" />
    <PackageDownload Include="Microsoft.Extensions.Options" Version="[9.0.0]" />
  </ItemGroup>
</Project>
45
deploy/tools/ci/nuget-prime/nuget-prime.csproj
Normal file
@@ -0,0 +1,45 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <RestorePackagesPath>../../.nuget/packages</RestorePackagesPath>
    <DisableImplicitFrameworkReferences>true</DisableImplicitFrameworkReferences>
    <EnableDefaultItems>false</EnableDefaultItems>
  </PropertyGroup>
  <ItemGroup>
    <PackageDownload Include="AWSSDK.Core" Version="[4.0.1.3]" />
    <PackageDownload Include="AWSSDK.KeyManagementService" Version="[4.0.6]" />
    <PackageDownload Include="AWSSDK.S3" Version="[3.7.305.6]" />
    <PackageDownload Include="CycloneDX.Core" Version="[10.0.2]" />
    <PackageDownload Include="Google.Protobuf" Version="[3.27.2]" />
    <PackageDownload Include="Grpc.Net.Client" Version="[2.65.0]" />
    <PackageDownload Include="Grpc.Tools" Version="[2.65.0]" />
    <PackageDownload Include="Microsoft.Data.Sqlite" Version="[9.0.0-rc.1.24451.1]" />
    <PackageDownload Include="Microsoft.Extensions.Configuration.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Configuration.Binder" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Diagnostics.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Hosting.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Http.Polly" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Http" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Logging.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Microsoft.Extensions.Options" Version="[10.0.0-rc.2.25502.107]" />
    <PackageDownload Include="Npgsql" Version="[9.0.3]" />
    <PackageDownload Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="[9.0.3]" />
    <PackageDownload Include="RoaringBitmap" Version="[0.0.9]" />
    <PackageDownload Include="Serilog.AspNetCore" Version="[8.0.1]" />
    <PackageDownload Include="Serilog.Extensions.Hosting" Version="[8.0.0]" />
    <PackageDownload Include="Serilog.Sinks.Console" Version="[5.0.1]" />
    <PackageDownload Include="StackExchange.Redis" Version="[2.8.37]" />
    <PackageDownload Include="System.Text.Json" Version="[10.0.0-preview.7.25380.108]" />
    <PackageDownload Include="Google.Api.CommonProtos" Version="[2.17.0]" />
    <PackageDownload Include="Google.Api.Gax" Version="[4.11.0]" />
    <PackageDownload Include="Google.Api.Gax.Grpc" Version="[4.11.0]" />
    <PackageDownload Include="Google.Api.Gax.Grpc.GrpcCore" Version="[4.11.0]" />
    <PackageDownload Include="Google.Apis" Version="[1.69.0]" />
    <PackageDownload Include="Google.Apis.Auth" Version="[1.69.0]" />
    <PackageDownload Include="Google.Apis.Core" Version="[1.64.0]" />
  </ItemGroup>
</Project>
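A sketch of priming the pinned package cache from these projects; the feed URL is an assumption and may be an internal mirror in practice.

```bash
# Hypothetical cache-priming restore (feed URL is illustrative)
dotnet restore deploy/tools/ci/nuget-prime/nuget-prime.csproj \
  --source https://api.nuget.org/v3/index.json
dotnet restore deploy/tools/ci/nuget-prime/nuget-prime-v9.csproj \
  --source https://api.nuget.org/v3/index.json
```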
87
deploy/tools/feeds/concelier/backfill-store-aoc-19-005.sh
Normal file
@@ -0,0 +1,87 @@
#!/usr/bin/env bash
set -euo pipefail

# Postgres backfill runner for STORE-AOC-19-005-DEV (Link-Not-Merge raw linksets/chunks)
# Usage:
#   PGURI=postgres://.../concelier ./scripts/concelier/backfill-store-aoc-19-005.sh /path/to/linksets-stage-backfill.tar.zst
# Optional:
#   PGSCHEMA=lnm_raw (default), DRY_RUN=1 to stop after extraction
#
# Assumptions:
#   - Dataset contains ndjson files: linksets.ndjson, advisory_chunks.ndjson, manifest.json
#   - Target staging tables are created by this script if absent:
#       <schema>.linksets_raw(id text primary key, raw jsonb)
#       <schema>.advisory_chunks_raw(id text primary key, raw jsonb)

DATASET_PATH="${1:-}"
if [[ -z "${DATASET_PATH}" || ! -f "${DATASET_PATH}" ]]; then
  echo "Dataset tarball not found. Provide path to linksets-stage-backfill.tar.zst" >&2
  exit 1
fi

PGURI="${PGURI:-${CONCELIER_PG_URI:-}}"
PGSCHEMA="${PGSCHEMA:-lnm_raw}"
DRY_RUN="${DRY_RUN:-0}"

if [[ -z "${PGURI}" ]]; then
  echo "PGURI (or CONCELIER_PG_URI) must be set" >&2
  exit 1
fi

WORKDIR="$(mktemp -d)"
cleanup() { rm -rf "${WORKDIR}"; }
trap cleanup EXIT

echo "==> Dataset: ${DATASET_PATH}"
sha256sum "${DATASET_PATH}"

echo "==> Extracting to ${WORKDIR}"
tar -xf "${DATASET_PATH}" -C "${WORKDIR}"

for required in linksets.ndjson advisory_chunks.ndjson manifest.json; do
  if [[ ! -f "${WORKDIR}/${required}" ]]; then
    echo "Missing required file in dataset: ${required}" >&2
    exit 1
  fi
done

echo "==> Ensuring staging schema/tables exist in Postgres"
psql "${PGURI}" <<SQL
create schema if not exists ${PGSCHEMA};
create table if not exists ${PGSCHEMA}.linksets_raw (
  id text primary key,
  raw jsonb not null
);
create table if not exists ${PGSCHEMA}.advisory_chunks_raw (
  id text primary key,
  raw jsonb not null
);
SQL

if [[ "${DRY_RUN}" != "0" ]]; then
  echo "DRY_RUN=1 set; extraction and schema verification completed, skipping import."
  exit 0
fi

echo "==> Importing linksets into ${PGSCHEMA}.linksets_raw"
jq -rc '[._id, tojson] | @tsv' "${WORKDIR}/linksets.ndjson" > "${WORKDIR}/linksets.tsv"
psql "${PGURI}" <<SQL
TRUNCATE TABLE ${PGSCHEMA}.linksets_raw;
\copy ${PGSCHEMA}.linksets_raw (id, raw) FROM '${WORKDIR}/linksets.tsv' WITH (FORMAT csv, DELIMITER E'\t', QUOTE '"', ESCAPE '"');
SQL

echo "==> Importing advisory_chunks into ${PGSCHEMA}.advisory_chunks_raw"
jq -rc '[._id, tojson] | @tsv' "${WORKDIR}/advisory_chunks.ndjson" > "${WORKDIR}/advisory_chunks.tsv"
psql "${PGURI}" <<SQL
TRUNCATE TABLE ${PGSCHEMA}.advisory_chunks_raw;
\copy ${PGSCHEMA}.advisory_chunks_raw (id, raw) FROM '${WORKDIR}/advisory_chunks.tsv' WITH (FORMAT csv, DELIMITER E'\t', QUOTE '"', ESCAPE '"');
SQL

echo "==> Post-import counts"
psql -tA "${PGURI}" -c "select 'linksets_raw='||count(*) from ${PGSCHEMA}.linksets_raw;"
psql -tA "${PGURI}" -c "select 'advisory_chunks_raw='||count(*) from ${PGSCHEMA}.advisory_chunks_raw;"

echo "==> Manifest summary"
cat "${WORKDIR}/manifest.json"

echo "Backfill complete."
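A possible dry-run-then-import sequence, assuming the dataset tarball has already been built; the connection string and paths are placeholders, not values defined by this commit.

```bash
# Hypothetical two-step run (values are placeholders)
export PGURI="postgres://concelier:REDACTED@db:5432/concelier"
DRY_RUN=1 deploy/tools/feeds/concelier/backfill-store-aoc-19-005.sh out/linksets/linksets-stage-backfill.tar.zst
deploy/tools/feeds/concelier/backfill-store-aoc-19-005.sh out/linksets/linksets-stage-backfill.tar.zst
```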
@@ -0,0 +1,74 @@
#!/usr/bin/env bash
set -euo pipefail

# Deterministic dataset builder for STORE-AOC-19-005-DEV.
# Generates linksets-stage-backfill.tar.zst from repo seed data.
# Usage:
#   ./scripts/concelier/build-store-aoc-19-005-dataset.sh [output_tarball]
# Default output: out/linksets/linksets-stage-backfill.tar.zst

command -v tar >/dev/null || { echo "tar is required" >&2; exit 1; }
command -v sha256sum >/dev/null || { echo "sha256sum is required" >&2; exit 1; }

TAR_COMPRESS=()
if command -v zstd >/dev/null 2>&1; then
  TAR_COMPRESS=(--zstd)
else
  echo "zstd not found; building uncompressed tarball (extension kept for compatibility)" >&2
fi

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SEED_DIR="${ROOT_DIR}/src/__Tests/__Datasets/seed-data/concelier/store-aoc-19-005"
OUT_DIR="${ROOT_DIR}/out/linksets"
OUT_PATH="${1:-${OUT_DIR}/linksets-stage-backfill.tar.zst}"
GEN_TIME="2025-12-07T00:00:00Z"

for seed in linksets.ndjson advisory_chunks.ndjson; do
  if [[ ! -f "${SEED_DIR}/${seed}" ]]; then
    echo "Missing seed file: ${SEED_DIR}/${seed}" >&2
    exit 1
  fi
done

WORKDIR="$(mktemp -d)"
cleanup() { rm -rf "${WORKDIR}"; }
trap cleanup EXIT

cp "${SEED_DIR}/linksets.ndjson" "${WORKDIR}/linksets.ndjson"
cp "${SEED_DIR}/advisory_chunks.ndjson" "${WORKDIR}/advisory_chunks.ndjson"

linksets_sha=$(sha256sum "${WORKDIR}/linksets.ndjson" | awk '{print $1}')
advisory_sha=$(sha256sum "${WORKDIR}/advisory_chunks.ndjson" | awk '{print $1}')
linksets_count=$(wc -l < "${WORKDIR}/linksets.ndjson" | tr -d '[:space:]')
advisory_count=$(wc -l < "${WORKDIR}/advisory_chunks.ndjson" | tr -d '[:space:]')

cat >"${WORKDIR}/manifest.json" <<EOF
{
  "datasetId": "store-aoc-19-005-dev",
  "generatedAt": "${GEN_TIME}",
  "source": "src/__Tests/__Datasets/seed-data/concelier/store-aoc-19-005",
  "records": {
    "linksets": ${linksets_count},
    "advisory_chunks": ${advisory_count}
  },
  "sha256": {
    "linksets.ndjson": "${linksets_sha}",
    "advisory_chunks.ndjson": "${advisory_sha}"
  }
}
EOF

mkdir -p "${OUT_DIR}"

tar "${TAR_COMPRESS[@]}" \
  --format=ustar \
  --mtime='1970-01-01 00:00:00Z' \
  --owner=0 --group=0 --numeric-owner \
  -cf "${OUT_PATH}" \
  -C "${WORKDIR}" \
  linksets.ndjson advisory_chunks.ndjson manifest.json

sha256sum "${OUT_PATH}" > "${OUT_PATH}.sha256"

echo "Wrote ${OUT_PATH}"
cat "${OUT_PATH}.sha256"
55
deploy/tools/feeds/concelier/export-linksets-tarball.sh
Normal file
@@ -0,0 +1,55 @@
#!/usr/bin/env bash
set -euo pipefail

# Export Concelier linksets/advisory_chunks from Postgres to a tar.zst bundle.
# Usage:
#   PGURI=postgres://user:pass@host:5432/db \
#     ./scripts/concelier/export-linksets-tarball.sh out/linksets/linksets-stage-backfill.tar.zst
#
# Optional env:
#   PGSCHEMA=public              # schema that owns linksets/advisory_chunks
#   LINKSETS_TABLE=linksets      # table name for linksets
#   CHUNKS_TABLE=advisory_chunks # table name for advisory chunks
#   TMPDIR=/tmp/export-linksets  # working directory (defaults to mktemp)

TARGET="${1:-}"
if [[ -z "${TARGET}" ]]; then
  echo "Usage: PGURI=... $0 out/linksets/linksets-stage-backfill.tar.zst" >&2
  exit 1
fi

if [[ -z "${PGURI:-}" ]]; then
  echo "PGURI environment variable is required (postgres://...)" >&2
  exit 1
fi

PGSCHEMA="${PGSCHEMA:-public}"
LINKSETS_TABLE="${LINKSETS_TABLE:-linksets}"
CHUNKS_TABLE="${CHUNKS_TABLE:-advisory_chunks}"
WORKDIR="${TMPDIR:-$(mktemp -d)}"

mkdir -p "${WORKDIR}"
OUTDIR="$(dirname "${TARGET}")"
mkdir -p "${OUTDIR}"

echo "==> Exporting linksets from ${PGSCHEMA}.${LINKSETS_TABLE}"
psql "${PGURI}" -c "\copy (select row_to_json(t) from ${PGSCHEMA}.${LINKSETS_TABLE} t) to '${WORKDIR}/linksets.ndjson'"

echo "==> Exporting advisory_chunks from ${PGSCHEMA}.${CHUNKS_TABLE}"
psql "${PGURI}" -c "\copy (select row_to_json(t) from ${PGSCHEMA}.${CHUNKS_TABLE} t) to '${WORKDIR}/advisory_chunks.ndjson'"

LINKSETS_COUNT="$(wc -l < "${WORKDIR}/linksets.ndjson")"
CHUNKS_COUNT="$(wc -l < "${WORKDIR}/advisory_chunks.ndjson")"

echo "==> Writing manifest.json"
jq -n --argjson linksets "${LINKSETS_COUNT}" --argjson advisory_chunks "${CHUNKS_COUNT}" \
  '{linksets: $linksets, advisory_chunks: $advisory_chunks}' \
  > "${WORKDIR}/manifest.json"

echo "==> Building tarball ${TARGET}"
tar -I "zstd -19" -cf "${TARGET}" -C "${WORKDIR}" linksets.ndjson advisory_chunks.ndjson manifest.json

echo "==> SHA-256"
sha256sum "${TARGET}"

echo "Done. Workdir: ${WORKDIR}"
@@ -0,0 +1,90 @@
#!/usr/bin/env bash
set -euo pipefail

# Validates the store-aoc-19-005 dataset tarball.
# Usage: ./scripts/concelier/test-store-aoc-19-005-dataset.sh [tarball]

command -v tar >/dev/null || { echo "tar is required" >&2; exit 1; }
command -v sha256sum >/dev/null || { echo "sha256sum is required" >&2; exit 1; }
command -v python >/dev/null || { echo "python is required" >&2; exit 1; }

DATASET="${1:-out/linksets/linksets-stage-backfill.tar.zst}"

if [[ ! -f "${DATASET}" ]]; then
  echo "Dataset not found: ${DATASET}" >&2
  exit 1
fi

WORKDIR="$(mktemp -d)"
cleanup() { rm -rf "${WORKDIR}"; }
trap cleanup EXIT

tar -xf "${DATASET}" -C "${WORKDIR}"

for required in linksets.ndjson advisory_chunks.ndjson manifest.json; do
  if [[ ! -f "${WORKDIR}/${required}" ]]; then
    echo "Missing ${required} in dataset" >&2
    exit 1
  fi
done

manifest="${WORKDIR}/manifest.json"
expected_linksets=$(python - <<'PY' "${manifest}"
import json, sys
with open(sys.argv[1], "r", encoding="utf-8") as f:
    data = json.load(f)
print(data["records"]["linksets"])
PY
)
expected_chunks=$(python - <<'PY' "${manifest}"
import json, sys
with open(sys.argv[1], "r", encoding="utf-8") as f:
    data = json.load(f)
print(data["records"]["advisory_chunks"])
PY
)
expected_linksets_sha=$(python - <<'PY' "${manifest}"
import json, sys
with open(sys.argv[1], "r", encoding="utf-8") as f:
    data = json.load(f)
print(data["sha256"]["linksets.ndjson"])
PY
)
expected_chunks_sha=$(python - <<'PY' "${manifest}"
import json, sys
with open(sys.argv[1], "r", encoding="utf-8") as f:
    data = json.load(f)
print(data["sha256"]["advisory_chunks.ndjson"])
PY
)

actual_linksets=$(wc -l < "${WORKDIR}/linksets.ndjson" | tr -d '[:space:]')
actual_chunks=$(wc -l < "${WORKDIR}/advisory_chunks.ndjson" | tr -d '[:space:]')
actual_linksets_sha=$(sha256sum "${WORKDIR}/linksets.ndjson" | awk '{print $1}')
actual_chunks_sha=$(sha256sum "${WORKDIR}/advisory_chunks.ndjson" | awk '{print $1}')

if [[ "${expected_linksets}" != "${actual_linksets}" ]]; then
  echo "linksets count mismatch: expected ${expected_linksets}, got ${actual_linksets}" >&2
  exit 1
fi

if [[ "${expected_chunks}" != "${actual_chunks}" ]]; then
  echo "advisory_chunks count mismatch: expected ${expected_chunks}, got ${actual_chunks}" >&2
  exit 1
fi

if [[ "${expected_linksets_sha}" != "${actual_linksets_sha}" ]]; then
  echo "linksets sha mismatch: expected ${expected_linksets_sha}, got ${actual_linksets_sha}" >&2
  exit 1
fi

if [[ "${expected_chunks_sha}" != "${actual_chunks_sha}" ]]; then
  echo "advisory_chunks sha mismatch: expected ${expected_chunks_sha}, got ${actual_chunks_sha}" >&2
  exit 1
fi

echo "Dataset validation succeeded:"
echo "  linksets: ${actual_linksets}"
echo "  advisory_chunks: ${actual_chunks}"
echo "  linksets.sha256=${actual_linksets_sha}"
echo "  advisory_chunks.sha256=${actual_chunks_sha}"
467
deploy/tools/feeds/feeds/run_icscisa_kisa_refresh.py
Normal file
@@ -0,0 +1,467 @@
#!/usr/bin/env python3
"""
ICS/KISA feed refresh runner.

Runs the SOP v0.2 workflow to emit NDJSON advisories, delta, fetch log, and hash
manifest under out/feeds/icscisa-kisa/<YYYYMMDD>/.

Defaults to live fetch with offline-safe fallback to baked-in samples. You can
force live/offline via env or CLI flags.
"""

from __future__ import annotations

import argparse
import datetime as dt
import hashlib
import json
import os
import re
import sys
from html import unescape
from pathlib import Path
from typing import Dict, Iterable, List, Tuple
from urllib.error import URLError, HTTPError
from urllib.parse import urlparse, urlunparse
from urllib.request import Request, urlopen
from xml.etree import ElementTree


DEFAULT_OUTPUT_ROOT = Path("out/feeds/icscisa-kisa")
DEFAULT_ICSCISA_URL = "https://www.cisa.gov/news-events/ics-advisories/icsa.xml"
DEFAULT_KISA_URL = "https://knvd.krcert.or.kr/rss/securityInfo.do"
DEFAULT_GATEWAY_HOST = "concelier-webservice"
DEFAULT_GATEWAY_SCHEME = "http"
USER_AGENT = "StellaOpsFeedRefresh/1.0 (+https://stella-ops.org)"


def utcnow() -> dt.datetime:
    return dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc)


def iso(ts: dt.datetime) -> str:
    return ts.strftime("%Y-%m-%dT%H:%M:%SZ")


def sha256_bytes(data: bytes) -> str:
    return hashlib.sha256(data).hexdigest()


def strip_html(value: str) -> str:
    return re.sub(r"<[^>]+>", "", value or "").strip()


def safe_request(url: str) -> bytes:
    req = Request(url, headers={"User-Agent": USER_AGENT})
    with urlopen(req, timeout=30) as resp:
        return resp.read()


def parse_rss_items(xml_bytes: bytes) -> Iterable[Dict[str, str]]:
    root = ElementTree.fromstring(xml_bytes)
    for item in root.findall(".//item"):
        title = (item.findtext("title") or "").strip()
        link = (item.findtext("link") or "").strip()
        description = strip_html(unescape(item.findtext("description") or ""))
        pub_date = (item.findtext("pubDate") or "").strip()
        yield {
            "title": title,
            "link": link,
            "description": description,
            "pub_date": pub_date,
        }


def normalize_icscisa_record(item: Dict[str, str], fetched_at: str, run_id: str) -> Dict[str, object]:
    advisory_id = item["title"].split(":")[0].strip() or "icsa-unknown"
    summary = item["description"] or item["title"]
    raw_payload = f"{item['title']}\n{item['link']}\n{item['description']}"
    record = {
        "advisory_id": advisory_id,
        "source": "icscisa",
        "source_url": item["link"] or DEFAULT_ICSCISA_URL,
        "title": item["title"] or advisory_id,
        "summary": summary,
        "published": iso(parse_pubdate(item["pub_date"])),
        "updated": iso(parse_pubdate(item["pub_date"])),
        "severity": "unknown",
        "cvss": None,
        "cwe": [],
        "affected_products": [],
        "references": [url for url in (item["link"],) if url],
        "signature": {"status": "missing", "reason": "unsigned_source"},
        "fetched_at": fetched_at,
        "run_id": run_id,
        "payload_sha256": sha256_bytes(raw_payload.encode("utf-8")),
    }
    return record


def normalize_kisa_record(item: Dict[str, str], fetched_at: str, run_id: str) -> Dict[str, object]:
    advisory_id = extract_kisa_id(item)
    raw_payload = f"{item['title']}\n{item['link']}\n{item['description']}"
    record = {
        "advisory_id": advisory_id,
        "source": "kisa",
        "source_url": item["link"] or DEFAULT_KISA_URL,
        "title": item["title"] or advisory_id,
        "summary": item["description"] or item["title"],
        "published": iso(parse_pubdate(item["pub_date"])),
        "updated": iso(parse_pubdate(item["pub_date"])),
        "severity": "unknown",
        "cvss": None,
        "cwe": [],
        "affected_products": [],
        "references": [url for url in (item["link"], DEFAULT_KISA_URL) if url],
        "signature": {"status": "missing", "reason": "unsigned_source"},
        "fetched_at": fetched_at,
        "run_id": run_id,
        "payload_sha256": sha256_bytes(raw_payload.encode("utf-8")),
    }
    return record


def extract_kisa_id(item: Dict[str, str]) -> str:
    link = item["link"]
    match = re.search(r"IDX=([0-9]+)", link)
    if match:
        return f"KISA-{match.group(1)}"
    return (item["title"].split()[0] if item["title"] else "KISA-unknown").strip()


def parse_pubdate(value: str) -> dt.datetime:
    if not value:
        return utcnow()
    try:
        # RFC1123-ish
        return dt.datetime.strptime(value, "%a, %d %b %Y %H:%M:%S %Z").replace(tzinfo=dt.timezone.utc)
    except ValueError:
        try:
            return dt.datetime.fromisoformat(value.replace("Z", "+00:00"))
        except ValueError:
            return utcnow()


def sample_records() -> List[Dict[str, object]]:
    now_iso = iso(utcnow())
    return [
        {
            "advisory_id": "ICSA-25-123-01",
            "source": "icscisa",
            "source_url": "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01",
            "title": "Example ICS Advisory",
            "summary": "Example Corp ControlSuite RCE via exposed management service.",
            "published": "2025-10-13T12:00:00Z",
            "updated": "2025-11-30T00:00:00Z",
            "severity": "High",
            "cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", "score": 9.8},
            "cwe": ["CWE-269"],
            "affected_products": [{"vendor": "Example Corp", "product": "ControlSuite", "versions": ["4.2.0", "4.2.1"]}],
            "references": [
                "https://example.com/security/icsa-25-123-01.pdf",
                "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01",
            ],
            "signature": {"status": "missing", "reason": "unsigned_source"},
            "fetched_at": now_iso,
            "run_id": "",
            "payload_sha256": sha256_bytes(b"ICSA-25-123-01 Example ControlSuite advisory payload"),
        },
        {
            "advisory_id": "ICSMA-25-045-01",
            "source": "icscisa",
            "source_url": "https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01",
            "title": "Example Medical Advisory",
            "summary": "HealthTech infusion pump vulnerabilities including two CVEs.",
            "published": "2025-10-14T09:30:00Z",
            "updated": "2025-12-01T00:00:00Z",
            "severity": "Medium",
            "cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L", "score": 6.3},
            "cwe": ["CWE-319"],
            "affected_products": [{"vendor": "HealthTech", "product": "InfusionManager", "versions": ["2.1.0", "2.1.1"]}],
            "references": [
                "https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01",
                "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2025-11111",
            ],
            "signature": {"status": "missing", "reason": "unsigned_source"},
            "fetched_at": now_iso,
            "run_id": "",
            "payload_sha256": sha256_bytes(b"ICSMA-25-045-01 Example medical advisory payload"),
        },
        {
            "advisory_id": "KISA-2025-5859",
            "source": "kisa",
            "source_url": "https://knvd.krcert.or.kr/detailDos.do?IDX=5859",
            "title": "KISA sample advisory 5859",
            "summary": "Remote code execution in ControlBoard service (offline HTML snapshot).",
            "published": "2025-11-03T22:53:00Z",
            "updated": "2025-12-02T00:00:00Z",
            "severity": "High",
            "cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", "score": 9.8},
            "cwe": ["CWE-787"],
            "affected_products": [{"vendor": "ACME", "product": "ControlBoard", "versions": ["1.0.1.0084", "2.0.1.0034"]}],
            "references": [
                "https://knvd.krcert.or.kr/rss/securityInfo.do",
                "https://knvd.krcert.or.kr/detailDos.do?IDX=5859",
            ],
            "signature": {"status": "missing", "reason": "unsigned_source"},
            "fetched_at": now_iso,
            "run_id": "",
            "payload_sha256": sha256_bytes(b"KISA advisory IDX 5859 cached HTML payload"),
        },
        {
            "advisory_id": "KISA-2025-5860",
            "source": "kisa",
            "source_url": "https://knvd.krcert.or.kr/detailDos.do?IDX=5860",
            "title": "KISA sample advisory 5860",
            "summary": "Authentication bypass via default credentials in NetGateway appliance.",
            "published": "2025-11-03T22:53:00Z",
            "updated": "2025-12-02T00:00:00Z",
            "severity": "Medium",
            "cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:L/I:L/A:L", "score": 7.3},
            "cwe": ["CWE-798"],
            "affected_products": [{"vendor": "NetGateway", "product": "Edge", "versions": ["3.4.2", "3.4.3"]}],
            "references": [
                "https://knvd.krcert.or.kr/rss/securityInfo.do",
                "https://knvd.krcert.or.kr/detailDos.do?IDX=5860",
            ],
            "signature": {"status": "missing", "reason": "unsigned_source"},
            "fetched_at": now_iso,
            "run_id": "",
            "payload_sha256": sha256_bytes(b"KISA advisory IDX 5860 cached HTML payload"),
        },
    ]


def build_records(
    run_id: str,
    fetched_at: str,
    live_fetch: bool,
    offline_only: bool,
    icscisa_url: str,
    kisa_url: str,
) -> Tuple[List[Dict[str, object]], Dict[str, str]]:
    samples = sample_records()
    sample_icscisa = [r for r in samples if r["source"] == "icscisa"]
    sample_kisa = [r for r in samples if r["source"] == "kisa"]
    status = {"icscisa": "offline", "kisa": "offline"}
    records: List[Dict[str, object]] = []

    if live_fetch and not offline_only:
        try:
            icscisa_items = list(parse_rss_items(safe_request(icscisa_url)))
            for item in icscisa_items:
                records.append(normalize_icscisa_record(item, fetched_at, run_id))
            status["icscisa"] = f"live:{len(icscisa_items)}"
        except (URLError, HTTPError, ElementTree.ParseError, TimeoutError) as exc:
            print(f"[warn] ICS CISA fetch failed ({exc}); falling back to samples.", file=sys.stderr)

        try:
            kisa_items = list(parse_rss_items(safe_request(kisa_url)))
            for item in kisa_items:
                records.append(normalize_kisa_record(item, fetched_at, run_id))
            status["kisa"] = f"live:{len(kisa_items)}"
        except (URLError, HTTPError, ElementTree.ParseError, TimeoutError) as exc:
            print(f"[warn] KISA fetch failed ({exc}); falling back to samples.", file=sys.stderr)

    if not records or status["icscisa"].startswith("live") is False:
        records.extend(apply_run_metadata(sample_icscisa, run_id, fetched_at))
        status["icscisa"] = status.get("icscisa") or "offline"

    if not any(r["source"] == "kisa" for r in records):
        records.extend(apply_run_metadata(sample_kisa, run_id, fetched_at))
        status["kisa"] = status.get("kisa") or "offline"

    return records, status


def apply_run_metadata(records: Iterable[Dict[str, object]], run_id: str, fetched_at: str) -> List[Dict[str, object]]:
    updated = []
    for record in records:
        copy = dict(record)
        copy["run_id"] = run_id
        copy["fetched_at"] = fetched_at
        copy["payload_sha256"] = record.get("payload_sha256") or sha256_bytes(json.dumps(record, sort_keys=True).encode("utf-8"))
        updated.append(copy)
    return updated


def find_previous_snapshot(base_dir: Path, current_run_date: str) -> Path | None:
    if not base_dir.exists():
        return None
    candidates = sorted(p for p in base_dir.iterdir() if p.is_dir() and p.name != current_run_date)
    if not candidates:
        return None
    return candidates[-1] / "advisories.ndjson"


def load_previous_hash(path: Path | None) -> str | None:
    if path and path.exists():
        return sha256_bytes(path.read_bytes())
    return None


def compute_delta(new_records: List[Dict[str, object]], previous_path: Path | None) -> Dict[str, object]:
    prev_records = {}
    if previous_path and previous_path.exists():
        with previous_path.open("r", encoding="utf-8") as handle:
            for line in handle:
                if line.strip():
                    rec = json.loads(line)
                    prev_records[rec["advisory_id"]] = rec

    new_by_id = {r["advisory_id"]: r for r in new_records}
    added = [rid for rid in new_by_id if rid not in prev_records]
    updated = [
        rid
        for rid, rec in new_by_id.items()
        if rid in prev_records and rec.get("payload_sha256") != prev_records[rid].get("payload_sha256")
    ]
    removed = [rid for rid in prev_records if rid not in new_by_id]

    return {
        "added": {"icscisa": [rid for rid in added if new_by_id[rid]["source"] == "icscisa"],
                  "kisa": [rid for rid in added if new_by_id[rid]["source"] == "kisa"]},
        "updated": {"icscisa": [rid for rid in updated if new_by_id[rid]["source"] == "icscisa"],
                    "kisa": [rid for rid in updated if new_by_id[rid]["source"] == "kisa"]},
        "removed": {"icscisa": [rid for rid in removed if prev_records[rid]["source"] == "icscisa"],
                    "kisa": [rid for rid in removed if prev_records[rid]["source"] == "kisa"]},
        "totals": {
            "icscisa": {
                "added": len([rid for rid in added if new_by_id[rid]["source"] == "icscisa"]),
                "updated": len([rid for rid in updated if new_by_id[rid]["source"] == "icscisa"]),
                "removed": len([rid for rid in removed if prev_records[rid]["source"] == "icscisa"]),
                "remaining": len([rid for rid, rec in new_by_id.items() if rec["source"] == "icscisa"]),
            },
            "kisa": {
                "added": len([rid for rid in added if new_by_id[rid]["source"] == "kisa"]),
                "updated": len([rid for rid in updated if new_by_id[rid]["source"] == "kisa"]),
                "removed": len([rid for rid in removed if prev_records[rid]["source"] == "kisa"]),
                "remaining": len([rid for rid, rec in new_by_id.items() if rec["source"] == "kisa"]),
            },
            "overall": len(new_records),
        },
    }


def write_ndjson(records: List[Dict[str, object]], path: Path) -> None:
    path.write_text("\n".join(json.dumps(r, sort_keys=True, separators=(",", ":")) for r in records) + "\n", encoding="utf-8")


def write_fetch_log(
    path: Path,
    run_id: str,
    start: str,
    end: str,
    status: Dict[str, str],
    gateway_host: str,
    gateway_scheme: str,
    icscisa_url: str,
    kisa_url: str,
    live_fetch: bool,
    offline_only: bool,
) -> None:
    lines = [
        f"run_id={run_id} start={start} end={end}",
        f"sources=icscisa,kisa cadence=weekly backlog_window=60d live_fetch={str(live_fetch).lower()} offline_only={str(offline_only).lower()}",
        f"gateway={gateway_scheme}://{gateway_host}",
        f"icscisa_url={icscisa_url} status={status.get('icscisa','offline')} retries=0",
        f"kisa_url={kisa_url} status={status.get('kisa','offline')} retries=0",
        "outputs=advisories.ndjson,delta.json,hashes.sha256",
    ]
    path.write_text("\n".join(lines) + "\n", encoding="utf-8")


def write_hashes(dir_path: Path) -> None:
    entries = []
    for name in ["advisories.ndjson", "delta.json", "fetch.log"]:
        file_path = dir_path / name
        entries.append(f"{sha256_bytes(file_path.read_bytes())} {name}")
    (dir_path / "hashes.sha256").write_text("\n".join(entries) + "\n", encoding="utf-8")


def main() -> None:
    parser = argparse.ArgumentParser(description="Run ICS/KISA feed refresh SOP v0.2")
    parser.add_argument("--out-dir", default=str(DEFAULT_OUTPUT_ROOT), help="Base output directory (default: out/feeds/icscisa-kisa)")
    parser.add_argument("--run-date", default=None, help="Override run date (YYYYMMDD)")
    parser.add_argument("--run-id", default=None, help="Override run id")
    parser.add_argument("--live", action="store_true", default=False, help="Force live fetch (default: enabled via env LIVE_FETCH=true)")
    parser.add_argument("--offline", action="store_true", default=False, help="Force offline samples only")
    args = parser.parse_args()

    now = utcnow()
    run_date = args.run_date or now.strftime("%Y%m%d")
    run_id = args.run_id or f"icscisa-kisa-{now.strftime('%Y%m%dT%H%M%SZ')}"
    fetched_at = iso(now)
    start = fetched_at

    live_fetch = args.live or os.getenv("LIVE_FETCH", "true").lower() == "true"
    offline_only = args.offline or os.getenv("OFFLINE_SNAPSHOT", "false").lower() == "true"

    output_root = Path(args.out_dir)
    output_dir = output_root / run_date
    output_dir.mkdir(parents=True, exist_ok=True)

    previous_path = find_previous_snapshot(output_root, run_date)

    gateway_host = os.getenv("FEED_GATEWAY_HOST", DEFAULT_GATEWAY_HOST)
    gateway_scheme = os.getenv("FEED_GATEWAY_SCHEME", DEFAULT_GATEWAY_SCHEME)

    def resolve_feed(url_env: str, default_url: str) -> str:
        if url_env:
            return url_env
        parsed = urlparse(default_url)
        # Replace host/scheme to allow on-prem DNS (docker network) defaults.
        rewritten = parsed._replace(netloc=gateway_host, scheme=gateway_scheme)
        return urlunparse(rewritten)

    resolved_icscisa_url = resolve_feed(os.getenv("ICSCISA_FEED_URL"), DEFAULT_ICSCISA_URL)
    resolved_kisa_url = resolve_feed(os.getenv("KISA_FEED_URL"), DEFAULT_KISA_URL)

    records, status = build_records(
        run_id=run_id,
        fetched_at=fetched_at,
        live_fetch=live_fetch,
        offline_only=offline_only,
        icscisa_url=resolved_icscisa_url,
        kisa_url=resolved_kisa_url,
    )

    write_ndjson(records, output_dir / "advisories.ndjson")

    delta = compute_delta(records, previous_path)
    delta_payload = {
        "run_id": run_id,
        "generated_at": iso(utcnow()),
        **delta,
        "previous_snapshot_sha256": load_previous_hash(previous_path),
    }
    (output_dir / "delta.json").write_text(json.dumps(delta_payload, separators=(",", ":")) + "\n", encoding="utf-8")

    end = iso(utcnow())
    write_fetch_log(
        output_dir / "fetch.log",
        run_id,
        start,
        end,
        status,
        gateway_host=gateway_host,
        gateway_scheme=gateway_scheme,
        icscisa_url=resolved_icscisa_url,
        kisa_url=resolved_kisa_url,
        live_fetch=live_fetch and not offline_only,
        offline_only=offline_only,
    )
    write_hashes(output_dir)

    print(f"[ok] wrote {len(records)} advisories to {output_dir}")
    print(f" run_id={run_id} live_fetch={live_fetch and not offline_only} offline_only={offline_only}")
    print(f" gateway={gateway_scheme}://{gateway_host}")
    print(f" icscisa_url={resolved_icscisa_url}")
    print(f" kisa_url={resolved_kisa_url}")
    print(f" status={status}")
    if previous_path:
        print(f" previous_snapshot={previous_path}")


if __name__ == "__main__":
    main()
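An offline-only invocation sketch, useful for air-gapped smoke tests; the flag and environment variable names come from the script above, while the gateway hostname and output layout are only examples.

```bash
# Offline sample run (writes under out/feeds/icscisa-kisa/<YYYYMMDD>/)
OFFLINE_SNAPSHOT=true python deploy/tools/feeds/feeds/run_icscisa_kisa_refresh.py --offline

# Live run routed through an on-prem gateway host (hostname is illustrative)
FEED_GATEWAY_HOST=concelier-webservice LIVE_FETCH=true \
  python deploy/tools/feeds/feeds/run_icscisa_kisa_refresh.py
```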
2
deploy/tools/feeds/vex/requirements.txt
Normal file
@@ -0,0 +1,2 @@
blake3==0.4.1
jsonschema==4.22.0
176
deploy/tools/feeds/vex/verify_proof_bundle.py
Normal file
@@ -0,0 +1,176 @@
#!/usr/bin/env python3
"""
Offline verifier for StellaOps VEX proof bundles.

- Validates the bundle against `docs/benchmarks/vex-evidence-playbook.schema.json`.
- Checks justification IDs against the signed catalog.
- Recomputes hashes for CAS artefacts, OpenVEX payload, and DSSE envelopes.
- Enforces coverage and negative-test requirements per task VEX-GAPS-401-062.
"""

from __future__ import annotations

import argparse
import base64
import json
from pathlib import Path
import sys
from typing import Dict, Any

import jsonschema
from blake3 import blake3


def load_json(path: Path) -> Any:
    return json.loads(path.read_text(encoding="utf-8"))


def digest_for(data: bytes, algo: str) -> str:
    if algo == "sha256":
        import hashlib

        return hashlib.sha256(data).hexdigest()
    if algo == "blake3":
        return blake3(data).hexdigest()
    raise ValueError(f"Unsupported hash algorithm: {algo}")


def parse_digest(digest: str) -> tuple[str, str]:
    if ":" not in digest:
        raise ValueError(f"Digest missing prefix: {digest}")
    algo, value = digest.split(":", 1)
    return algo, value


def verify_digest(path: Path, expected: str) -> None:
    algo, value = parse_digest(expected)
    actual = digest_for(path.read_bytes(), algo)
    if actual.lower() != value.lower():
        raise ValueError(f"Digest mismatch for {path}: expected {value}, got {actual}")


def resolve_cas_uri(cas_root: Path, cas_uri: str) -> Path:
    if not cas_uri.startswith("cas://"):
        raise ValueError(f"CAS URI must start with cas:// — got {cas_uri}")
    relative = cas_uri[len("cas://") :]
    return cas_root / relative


def verify_dsse(dsse_ref: Dict[str, Any]) -> None:
    path = Path(dsse_ref["path"])
    verify_digest(path, dsse_ref["sha256"])
    if "payload_sha256" in dsse_ref:
        envelope = load_json(path)
        payload = base64.b64decode(envelope["payload"])
        verify_digest_from_bytes(payload, dsse_ref["payload_sha256"])


def verify_digest_from_bytes(data: bytes, expected: str) -> None:
    algo, value = parse_digest(expected)
    actual = digest_for(data, algo)
    if actual.lower() != value.lower():
        raise ValueError(f"Digest mismatch for payload: expected {value}, got {actual}")


def main() -> int:
    parser = argparse.ArgumentParser(description="Verify a StellaOps VEX proof bundle.")
    parser.add_argument("--bundle", required=True, type=Path)
    parser.add_argument("--schema", required=True, type=Path)
    parser.add_argument("--catalog", required=True, type=Path)
    parser.add_argument("--cas-root", required=True, type=Path)
    parser.add_argument("--min-coverage", type=float, default=95.0)
    args = parser.parse_args()

    bundle = load_json(args.bundle)
    schema = load_json(args.schema)
    catalog = load_json(args.catalog)

    jsonschema.validate(instance=bundle, schema=schema)

    justification_ids = {entry["id"] for entry in catalog.get("entries", [])}
    if bundle["justification"]["id"] not in justification_ids:
        raise ValueError(f"Justification {bundle['justification']['id']} not found in catalog")

    # Justification DSSE integrity
    if "dsse" in bundle["justification"]:
        verify_dsse(bundle["justification"]["dsse"])

    # OpenVEX canonical hashes
    openvex_path = Path(bundle["openvex"]["path"])
    openvex_bytes = openvex_path.read_bytes()
    verify_digest_from_bytes(openvex_bytes, bundle["openvex"]["canonical_sha256"])
    verify_digest_from_bytes(openvex_bytes, bundle["openvex"]["canonical_blake3"])

    # CAS evidence
    evidence_by_type: Dict[str, Dict[str, Any]] = {}
    for ev in bundle["evidence"]:
        ev_path = resolve_cas_uri(args.cas_root, ev["cas_uri"])
        verify_digest(ev_path, ev["hash"])
        if "dsse" in ev:
            verify_dsse(ev["dsse"])
        evidence_by_type.setdefault(ev["type"], ev)

    # Graph hash alignment
    graph = bundle["graph"]
    graph_evidence = evidence_by_type.get("graph")
    if not graph_evidence:
        raise ValueError("Graph evidence missing from bundle")
    if graph["hash"].lower() != graph_evidence["hash"].lower():
        raise ValueError("Graph hash does not match evidence hash")
    if "dsse" in graph:
        verify_dsse(graph["dsse"])

    # Entrypoint coverage + negative tests + config/flags hashes
    for ep in bundle["entrypoints"]:
        if ep["coverage_percent"] < args.min_coverage:
            raise ValueError(
                f"Entrypoint {ep['id']} coverage {ep['coverage_percent']} below required {args.min_coverage}"
            )
        if not ep["negative_tests"]:
            raise ValueError(f"Entrypoint {ep['id']} missing negative test confirmation")
        config_ev = evidence_by_type.get("config")
        if not config_ev or config_ev["hash"].lower() != ep["config_hash"].lower():
            raise ValueError(f"Entrypoint {ep['id']} config_hash not backed by evidence")
        flags_ev = evidence_by_type.get("flags")
        if not flags_ev or flags_ev["hash"].lower() != ep["flags_hash"].lower():
            raise ValueError(f"Entrypoint {ep['id']} flags_hash not backed by evidence")

    # RBAC enforcement
    rbac = bundle["rbac"]
    if rbac["approvals_required"] < 1 or not rbac["roles_allowed"]:
        raise ValueError("RBAC section is incomplete")

    # Reevaluation triggers: must all be true to satisfy VEX-GAPS-401-062
    reevaluation = bundle["reevaluation"]
    if not all(
        [
            reevaluation.get("on_sbom_change"),
            reevaluation.get("on_graph_change"),
            reevaluation.get("on_runtime_change"),
        ]
    ):
        raise ValueError("Reevaluation triggers must all be true")

    # Uncertainty gating present
    uncertainty = bundle["uncertainty"]
    if uncertainty["state"] not in {"U0-none", "U1-low", "U2-medium", "U3-high"}:
        raise ValueError("Invalid uncertainty state")

    # Signature envelope integrity (best-effort)
    default_dsse_path = args.bundle.with_suffix(".dsse.json")
    if default_dsse_path.exists():
        sig_envelope_digest = f"sha256:{digest_for(default_dsse_path.read_bytes(), 'sha256')}"
        for sig in bundle["signatures"]:
            if sig["envelope_digest"].lower() != sig_envelope_digest.lower():
                raise ValueError("Signature envelope digest mismatch")

    print("✔ VEX proof bundle verified")
    return 0


if __name__ == "__main__":
    try:
        sys.exit(main())
    except Exception as exc:  # pragma: no cover - top-level guard
        print(f"Verification failed: {exc}", file=sys.stderr)
        sys.exit(1)
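A hypothetical verification call for this tool; the schema path comes from the docstring above, while the bundle, catalog, and CAS-root paths are placeholders chosen only to mirror the argument names.

```bash
# Illustrative invocation (paths are placeholders)
python deploy/tools/feeds/vex/verify_proof_bundle.py \
  --bundle out/vex/proof-bundle.json \
  --schema docs/benchmarks/vex-evidence-playbook.schema.json \
  --catalog out/vex/justification-catalog.json \
  --cas-root out/vex/cas \
  --min-coverage 95
```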
63
deploy/tools/security/attest/build-attestation-bundle.sh
Normal file
@@ -0,0 +1,63 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-ATTEST-74-002: package attestation outputs into an offline bundle with checksums.

if [[ $# -lt 1 ]]; then
  echo "Usage: $0 <attest-dir> [bundle-out]" >&2
  exit 64
fi

ATTEST_DIR=$1
BUNDLE_OUT=${2:-"out/attest-bundles"}

if [[ ! -d "$ATTEST_DIR" ]]; then
  echo "[attest-bundle] attestation directory not found: $ATTEST_DIR" >&2
  exit 66
fi

mkdir -p "$BUNDLE_OUT"

TS=$(date -u +"%Y%m%dT%H%M%SZ")
BUNDLE_NAME="attestation-bundle-${TS}"
WORK_DIR="${BUNDLE_OUT}/${BUNDLE_NAME}"
mkdir -p "$WORK_DIR"

copy_if_exists() {
  local pattern="$1"
  shopt -s nullglob
  local files=("$ATTEST_DIR"/$pattern)
  if (( ${#files[@]} > 0 )); then
    cp "${files[@]}" "$WORK_DIR/"
  fi
  shopt -u nullglob
}

# Collect common attestation artefacts
copy_if_exists "*.dsse.json"
copy_if_exists "*.in-toto.jsonl"
copy_if_exists "*.sarif"
copy_if_exists "*.intoto.json"
copy_if_exists "*.rekor.txt"
copy_if_exists "*.sig"
copy_if_exists "*.crt"
copy_if_exists "*.pem"
copy_if_exists "*.json"

# Manifest
cat > "${WORK_DIR}/manifest.json" <<EOF
{
  "created_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
  "source_dir": "${ATTEST_DIR}",
  "files": $(ls -1 "${WORK_DIR}" | jq -R . | jq -s .)
}
EOF

# Checksums
(
  cd "$WORK_DIR"
  sha256sum * > SHA256SUMS
)

tar -C "$BUNDLE_OUT" -czf "${WORK_DIR}.tgz" "${BUNDLE_NAME}"
echo "[attest-bundle] bundle created at ${WORK_DIR}.tgz"
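A sketch of packaging a directory of attestation outputs with this script; the input directory name is an assumption for illustration.

```bash
# Hypothetical usage (attestation directory is illustrative)
deploy/tools/security/attest/build-attestation-bundle.sh out/attest out/attest-bundles
```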
124
deploy/tools/security/cosign/README.md
Normal file
@@ -0,0 +1,124 @@
# Cosign binaries (runtime/signals signing)

## Preferred (system)
- Version: `v3.0.2`
- Path: `/usr/local/bin/cosign` (installed on WSL Debian host)
- Breaking change: v3 requires `--bundle <file>` when signing blobs; older `--output-signature`/`--output-certificate` pairs are deprecated.

## Offline fallback (repo-pinned)
- Version: `v2.6.0`
- Binary: `tools/cosign/cosign` → `tools/cosign/v2.6.0/cosign-linux-amd64`
- SHA256: `ea5c65f99425d6cfbb5c4b5de5dac035f14d09131c1a0ea7c7fc32eab39364f9`
- Check: `cd tools/cosign/v2.6.0 && sha256sum -c cosign_checksums.txt --ignore-missing`

## Usage examples
- v3 DSSE blob: `cosign sign-blob --key cosign.key --predicate-type stella.ops/confidenceDecayConfig@v1 --bundle confidence_decay_config.sigstore.json decay/confidence_decay_config.yaml`
- v3 verify: `cosign verify-blob --bundle confidence_decay_config.sigstore.json decay/confidence_decay_config.yaml`
- To force offline fallback, export `PATH=./tools/cosign:$PATH` (ensures v2.6.0 is used).

## CI Workflow: signals-dsse-sign.yml

The `.gitea/workflows/signals-dsse-sign.yml` workflow automates DSSE signing for Signals artifacts.

### Required Secrets
| Secret | Description | Required |
|--------|-------------|----------|
| `COSIGN_PRIVATE_KEY_B64` | Base64-encoded cosign private key | Yes (for production) |
| `COSIGN_PASSWORD` | Password for the private key | If key is encrypted |
| `CI_EVIDENCE_LOCKER_TOKEN` | Token for Evidence Locker upload | Optional |

### Trigger Options
1. **Automatic**: On push to `main` when signals artifacts change
2. **Manual**: Via workflow_dispatch with options:
   - `out_dir`: Output directory (default: `evidence-locker/signals/2025-12-01`)
   - `allow_dev_key`: Set to `1` for testing with dev key

### Setting Up CI Secrets
```bash
# Generate production key pair (do this once, securely)
cosign generate-key-pair

# Base64 encode the private key
cat cosign.key | base64 -w0 > cosign.key.b64

# Add to Gitea secrets:
# - COSIGN_PRIVATE_KEY_B64: contents of cosign.key.b64
# - COSIGN_PASSWORD: password used during key generation
```

## CI / secrets (manual usage)
- CI should provide a base64-encoded private key via secret `COSIGN_PRIVATE_KEY_B64` and optional password in `COSIGN_PASSWORD`.
- Example bootstrap in jobs:
```bash
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > /tmp/cosign.key
chmod 600 /tmp/cosign.key
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" cosign version
```
- For local dev, copy your own key to `tools/cosign/cosign.key` or export `COSIGN_PRIVATE_KEY_B64` before running signing scripts. Never commit real keys; only `cosign.key.example` lives in git.

## Development signing key

A development key pair is provided for local testing and smoke tests:

| File | Description |
|------|-------------|
| `tools/cosign/cosign.dev.key` | Private key (password-protected) |
| `tools/cosign/cosign.dev.pub` | Public key for verification |

### Usage
```bash
# Sign signals artifacts with dev key
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
  OUT_DIR=docs/modules/signals/dev-test \
  tools/cosign/sign-signals.sh

# Verify a signature
cosign verify-blob \
  --key tools/cosign/cosign.dev.pub \
  --bundle docs/modules/signals/dev-test/confidence_decay_config.sigstore.json \
  docs/modules/signals/decay/confidence_decay_config.yaml
```

### Security Notes
- Password: `stellaops-dev` (do not reuse elsewhere)
- **NOT** for production or Evidence Locker ingestion
- Real signing requires the Signals Guild key via `COSIGN_PRIVATE_KEY_B64` (CI) or `tools/cosign/cosign.key` (local drop-in)
- `sign-signals.sh` requires `COSIGN_ALLOW_DEV_KEY=1` to use the dev key; otherwise it refuses
- The signing helper disables tlog upload (`--tlog-upload=false`) and auto-accepts prompts (`--yes`) for offline runs

## Signing Scripts

### sign-signals.sh
Signs decay config, unknowns manifest, and heuristics catalog with DSSE envelopes.

```bash
# Production (CI secret or cosign.key drop-in)
OUT_DIR=evidence-locker/signals/2025-12-01 tools/cosign/sign-signals.sh

# Development (dev key)
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
  OUT_DIR=docs/modules/signals/dev-test \
  tools/cosign/sign-signals.sh
```

### Key Resolution Order
1. `COSIGN_KEY_FILE` environment variable
2. `COSIGN_PRIVATE_KEY_B64` (decoded to temp file)
3. `tools/cosign/cosign.key` (production drop-in)
4. `tools/cosign/cosign.dev.key` (only if `COSIGN_ALLOW_DEV_KEY=1`)

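A minimal sketch of that order as shell logic (illustrative only; the authoritative implementation lives in `sign-signals.sh`, and the function name here is made up):

```bash
resolve_cosign_key() {
  # 1. Explicit key file wins.
  if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then echo "$COSIGN_KEY_FILE"; return; fi
  # 2. CI secret, decoded to a temp file.
  if [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
    tmp=$(mktemp)
    echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$tmp"
    echo "$tmp"; return
  fi
  # 3. Production drop-in.
  if [[ -f tools/cosign/cosign.key ]]; then echo "tools/cosign/cosign.key"; return; fi
  # 4. Dev key, only when explicitly allowed.
  if [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" ]]; then echo "tools/cosign/cosign.dev.key"; return; fi
  echo "no signing key available" >&2; return 1
}
```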
### sign-authority-gaps.sh
Signs Authority gap artefacts (AU1–AU10, RR1–RR10) under `docs/modules/authority/gaps/artifacts/`.

```bash
# Production (Authority key via CI secret or cosign.key drop-in)
OUT_DIR=docs/modules/authority/gaps/dsse/2025-12-04 tools/cosign/sign-authority-gaps.sh

# Development (dev key, smoke only)
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
  OUT_DIR=docs/modules/authority/gaps/dev-smoke/2025-12-04 \
  tools/cosign/sign-authority-gaps.sh
```

- Outputs bundles or DSSE signatures plus `SHA256SUMS` in `OUT_DIR`.
- tlog upload disabled (`--tlog-upload=false`) and prompts auto-accepted (`--yes`) for offline use.
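A hedged check of a signing run afterwards (uses the `OUT_DIR` from the production example above; only `SHA256SUMS` is documented output, so the listing is just for inspection):

```bash
OUT_DIR=docs/modules/authority/gaps/dsse/2025-12-04
(cd "$OUT_DIR" && sha256sum -c SHA256SUMS)
ls -1 "$OUT_DIR"
```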
1
deploy/tools/security/cosign/cosign
Normal file
1
deploy/tools/security/cosign/cosign
Normal file
@@ -0,0 +1 @@
v2.6.0/cosign-linux-amd64
11
deploy/tools/security/cosign/cosign.dev.key
Normal file
11
deploy/tools/security/cosign/cosign.dev.key
Normal file
@@ -0,0 +1,11 @@
-----BEGIN ENCRYPTED SIGSTORE PRIVATE KEY-----
eyJrZGYiOnsibmFtZSI6InNjcnlwdCIsInBhcmFtcyI6eyJOIjo2NTUzNiwiciI6
OCwicCI6MX0sInNhbHQiOiJ5dlhpaXliR2lTR0NPS2x0Q2M1dlFhTy91S3pBVzNs
Skl3QTRaU2dEMTAwPSJ9LCJjaXBoZXIiOnsibmFtZSI6Im5hY2wvc2VjcmV0Ym94
Iiwibm9uY2UiOiIyNHA0T2xJZnJxdnhPVnM3dlY2MXNwVGpkNk80cVBEVCJ9LCJj
aXBoZXJ0ZXh0IjoiTHRWSGRqVi94MXJrYXhscGxJbVB5dkVtc2NBYTB5dW5oakZ5
UUFiZ1RSNVdZL3lCS0tYMWdFb09hclZDWksrQU0yY0tIM2tJQWlJNWlMd1AvV3c5
Q3k2SVY1ek4za014cExpcjJ1QVZNV3c3Y3BiYUhnNjV4TzNOYkEwLzJOSi84R0dN
NWt1QXhJRWsraER3ZWJ4Tld4WkRtNEZ4NTJVcVJxa2NPT09vNk9xWXB4OWFMaVZw
RjgzRElGZFpRK2R4K05RUnUxUmNrKzBtOHc9PSJ9
-----END ENCRYPTED SIGSTORE PRIVATE KEY-----
4
deploy/tools/security/cosign/cosign.dev.pub
Normal file
4
deploy/tools/security/cosign/cosign.dev.pub
Normal file
@@ -0,0 +1,4 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfoI+9RFCTcfjeMqpCQ3FAyvKwBQU
YAIM2cfDR8W98OxnXV+gfV5Dhfoi8qofAnG/vC7DbBlX2t/gT7GKUZAChA==
-----END PUBLIC KEY-----
8
deploy/tools/security/cosign/cosign.key.example
Normal file
8
deploy/tools/security/cosign/cosign.key.example
Normal file
@@ -0,0 +1,8 @@
# Placeholder development cosign key
#
# Do not use in production. Generate your own:
# cosign generate-key-pair
#
# Store the private key securely (e.g., CI secret COSIGN_PRIVATE_KEY_B64).
#
# This file exists only as a path stub for tooling; it is not a real key.
BIN
deploy/tools/security/cosign/v2.6.0/cosign-linux-amd64
Normal file
BIN
deploy/tools/security/cosign/v2.6.0/cosign-linux-amd64
Normal file
Binary file not shown.
40
deploy/tools/security/cosign/v2.6.0/cosign_checksums.txt
Normal file
40
deploy/tools/security/cosign/v2.6.0/cosign_checksums.txt
Normal file
@@ -0,0 +1,40 @@
e8c634db1252725eabfd517f02e6ebf0d07bfba5b4779d7b45ef373ceff07b38  cosign-2.6.0-1.aarch64.rpm
9de55601c34fe7a8eaecb7a2fab93da032dd91d423a04ae6ac17e3f5ed99ec72  cosign-2.6.0-1.armv7hl.rpm
f7281a822306c35f2bd66c055ba6f77a7298de3375a401b12664035b8b323fdf  cosign-2.6.0-1.ppc64le.rpm
814b890a07b56bcc6a42dfdf9004fadfe45c112e9b11a0c2f4ebf45568e72b4c  cosign-2.6.0-1.riscv64.rpm
19241a09cc065f062d63a9c9ce45ed7c7ff839b93672be4688334b925809d266  cosign-2.6.0-1.s390x.rpm
52709467f072043f24553c6dd1e0f287eeeedb23340dd90a4438b8506df0a0bc  cosign-2.6.0-1.x86_64.rpm
83b0fb42bc265e62aef7de49f4979b7957c9b7320d362a9f20046b2f823330f3  cosign-darwin-amd64
3bcbcfc41d89e162e47ba08f70ffeffaac567f663afb3545c0265a5041ce652d  cosign-darwin-amd64_2.6.0_darwin_amd64.sbom.json
dea5b83b8b375b99ac803c7bdb1f798963dbeb47789ceb72153202e7f20e8d07  cosign-darwin-arm64
c09a84869eb31fcf334e54d0a9f81bf466ba7444dc975a8fe46b94d742288980  cosign-darwin-arm64_2.6.0_darwin_arm64.sbom.json
ea5c65f99425d6cfbb5c4b5de5dac035f14d09131c1a0ea7c7fc32eab39364f9  cosign-linux-amd64
b4ccc276a5cc326f87d81fd1ae12f12a8dba64214ec368a39401522cccae7f9a  cosign-linux-amd64_2.6.0_linux_amd64.sbom.json
641e05c21ce423cd263a49b1f9ffca58e2df022cb12020dcea63f8317c456950  cosign-linux-arm
e09684650882fd721ed22b716ffc399ee11426cd4d1c9b4fec539cba8bf46b86  cosign-linux-arm64
d05d37f6965c3f3c77260171289281dbf88d1f2b07e865bf9d4fd94d9f2fe5c4  cosign-linux-arm64_2.6.0_linux_arm64.sbom.json
1b8b96535a7c30dbecead51ac3f51f559b31d8ab1dd4842562f857ebb1941fa5  cosign-linux-arm_2.6.0_linux_arm.sbom.json
6fa93dbd97664ccce6c3e5221e22e14547b0d202ba829e2b34a3479266b33751  cosign-linux-pivkey-pkcs11key-amd64
17b9803701f5908476d5904492b7a4d1568b86094c3fbb5a06afaa62a6910e8c  cosign-linux-pivkey-pkcs11key-amd64_2.6.0_linux_amd64.sbom.json
fbb78394e6fc19a2f34fea4ba03ea796aca84b666b6cdf65f46775f295fc9103  cosign-linux-pivkey-pkcs11key-arm64
35ac308bd9c59844e056f6251ab76184bfc321cb1b3ac337fdb94a9a289d4d44  cosign-linux-pivkey-pkcs11key-arm64_2.6.0_linux_arm64.sbom.json
bd9cc643ec8a517ca66b22221b830dc9d6064bd4f3b76579e4e28b6af5cfba5f  cosign-linux-ppc64le
ef04b0e087b95ce1ba7a902ecc962e50bfc974da0bd6b5db59c50880215a3f06  cosign-linux-ppc64le_2.6.0_linux_ppc64le.sbom.json
17c8ff6a5dc48d3802b511c3eb7495da6142397ace28af9a1baa58fb34fad75c  cosign-linux-riscv64
2007628a662808f221dc1983d9fba2676df32bb98717f89360cd191c929492ba  cosign-linux-riscv64_2.6.0_linux_riscv64.sbom.json
7f7f042e7131950c658ff87079ac9080e7d64392915f06811f06a96238c242c1  cosign-linux-s390x
e22a35083b21552c80bafb747c022aa2aad302c861a392199bc2a8fad22dd6b5  cosign-linux-s390x_2.6.0_linux_s390x.sbom.json
7beb4dd1e19a72c328bbf7c0d7342d744edbf5cbb082f227b2b76e04a21c16ef  cosign-windows-amd64.exe
8110eab8c5842caf93cf05dd26f260b6836d93b0263e49e06c1bd22dd5abb82c  cosign-windows-amd64.exe_2.6.0_windows_amd64.sbom.json
7713d587f8668ce8f2a48556ee17f47c281cfb90102adfdb7182de62bc016cab  cosign_2.6.0_aarch64.apk
c51b6437559624ef88b29a1ddd88d0782549b585dbbae0a5cb2fcc02bec72687  cosign_2.6.0_amd64.deb
438baaa35101e9982081c6450a44ea19e04cd4d2aba283ed52242e451736990b  cosign_2.6.0_arm64.deb
8dc33858a68e18bf0cc2cb18c2ba0a7d829aa59ad3125366b24477e7d6188024  cosign_2.6.0_armhf.deb
88397077deee943690033276eef5206f7c60a30ea5f6ced66a51601ce79d0d0e  cosign_2.6.0_armv7.apk
ca45b82cde86634705187f2361363e67c70c23212283594ff942d583a543f9dd  cosign_2.6.0_ppc64el.deb
497f1a6d3899493153a4426286e673422e357224f3f931fdc028455db2fb5716  cosign_2.6.0_ppc64le.apk
1e37d9c3d278323095899897236452858c0bc49b52a48c3bcf8ce7a236bf2ee1  cosign_2.6.0_riscv64.apk
f2f65cf3d115fa5b25c61f6692449df2f4da58002a99e3efacc52a848fd3bca8  cosign_2.6.0_riscv64.deb
af0a62231880fd3495bbd1f5d4c64384034464b80930b7ffcd819d7152e75759  cosign_2.6.0_s390x.apk
e282d9337e4ba163a48ff1175855a6f6d6fbb562bc6c576c93944a6126984203  cosign_2.6.0_s390x.deb
382a842b2242656ecd442ae461c4dc454a366ed50d41a2dafcce8b689bfd03e4  cosign_2.6.0_x86_64.apk
220
deploy/tools/security/crypto/download-cryptopro-playwright.cjs
Normal file
220
deploy/tools/security/crypto/download-cryptopro-playwright.cjs
Normal file
@@ -0,0 +1,220 @@
#!/usr/bin/env node
/**
 * CryptoPro CSP downloader (Playwright-driven).
 *
 * Navigates cryptopro.ru downloads page, optionally fills login form, and selects
 * Linux packages (.rpm/.deb/.tar.gz/.tgz/.bin) under the CSP Linux section.
 *
 * Environment:
 * - CRYPTOPRO_URL (default: https://cryptopro.ru/products/csp/downloads#latest_csp50r3_linux)
 * - CRYPTOPRO_EMAIL / CRYPTOPRO_PASSWORD (default demo creds: contact@stella-ops.org / Hoko33JD3nj3aJD.)
 * - CRYPTOPRO_DRY_RUN (default: 1) -> list candidates, do not download
 * - CRYPTOPRO_OUTPUT_DIR (default: /opt/cryptopro/downloads)
 * - CRYPTOPRO_OUTPUT_FILE (optional: force a specific output filename/path)
 * - CRYPTOPRO_UNPACK (default: 0) -> attempt to unpack tar.gz/tgz/rpm/deb
 */

const path = require('path');
const fs = require('fs');
const { spawnSync } = require('child_process');
const { chromium } = require('playwright-chromium');

const url = process.env.CRYPTOPRO_URL || 'https://cryptopro.ru/products/csp/downloads#latest_csp50r3_linux';
const email = process.env.CRYPTOPRO_EMAIL || 'contact@stella-ops.org';
const password = process.env.CRYPTOPRO_PASSWORD || 'Hoko33JD3nj3aJD.';
const dryRun = (process.env.CRYPTOPRO_DRY_RUN || '1') !== '0';
const outputDir = process.env.CRYPTOPRO_OUTPUT_DIR || '/opt/cryptopro/downloads';
const outputFile = process.env.CRYPTOPRO_OUTPUT_FILE;
const unpack = (process.env.CRYPTOPRO_UNPACK || '0') === '1';
const navTimeout = parseInt(process.env.CRYPTOPRO_NAV_TIMEOUT || '60000', 10);

const linuxPattern = /\.(rpm|deb|tar\.gz|tgz|bin)(\?|$)/i;
const debugLinks = (process.env.CRYPTOPRO_DEBUG || '0') === '1';

function log(msg) {
  process.stdout.write(`${msg}\n`);
}

function warn(msg) {
  process.stderr.write(`[WARN] ${msg}\n`);
}

async function maybeLogin(page) {
  const emailSelector = 'input[type="email"], input[name*="email" i], input[name*="login" i], input[name="name"]';
  const passwordSelector = 'input[type="password"], input[name*="password" i]';
  const submitSelector = 'button[type="submit"], input[type="submit"]';

  const emailInput = await page.$(emailSelector);
  const passwordInput = await page.$(passwordSelector);
  if (emailInput && passwordInput) {
    log('[login] Form detected; submitting credentials');
    await emailInput.fill(email);
    await passwordInput.fill(password);
    const submit = await page.$(submitSelector);
    if (submit) {
      await Promise.all([
        page.waitForNavigation({ waitUntil: 'networkidle', timeout: 15000 }).catch(() => {}),
        submit.click()
      ]);
    } else {
      await passwordInput.press('Enter');
      await page.waitForTimeout(2000);
    }
  } else {
    log('[login] No login form detected; continuing anonymously');
  }
}

async function findLinuxLinks(page) {
  const targets = [page, ...page.frames()];
  const hrefs = [];

  // Collect href/data-href/data-url across main page + frames
  for (const target of targets) {
    try {
      const collected = await target.$$eval('a[href], [data-href], [data-url]', (els) =>
        els
          .map((el) => el.getAttribute('href') || el.getAttribute('data-href') || el.getAttribute('data-url'))
          .filter((href) => typeof href === 'string')
      );
      hrefs.push(...collected);
    } catch (err) {
      warn(`[scan] Failed to collect links from frame: ${err.message}`);
    }
  }

  const unique = Array.from(new Set(hrefs));
  return unique.filter((href) => linuxPattern.test(href));
}

function unpackIfSupported(filePath) {
  if (!unpack) {
    return;
  }
  const cwd = path.dirname(filePath);
  if (filePath.endsWith('.tar.gz') || filePath.endsWith('.tgz')) {
    const res = spawnSync('tar', ['-xzf', filePath, '-C', cwd], { stdio: 'inherit' });
    if (res.status === 0) {
      log(`[unpack] Extracted ${filePath}`);
    } else {
      warn(`[unpack] Failed to extract ${filePath}`);
    }
  } else if (filePath.endsWith('.rpm')) {
    const res = spawnSync('bash', ['-lc', `rpm2cpio "${filePath}" | cpio -idmv`], { stdio: 'inherit', cwd });
    if (res.status === 0) {
      log(`[unpack] Extracted RPM ${filePath}`);
    } else {
      warn(`[unpack] Failed to extract RPM ${filePath}`);
    }
  } else if (filePath.endsWith('.deb')) {
    const res = spawnSync('dpkg-deb', ['-x', filePath, cwd], { stdio: 'inherit' });
    if (res.status === 0) {
      log(`[unpack] Extracted DEB ${filePath}`);
    } else {
      warn(`[unpack] Failed to extract DEB ${filePath}`);
    }
  } else if (filePath.endsWith('.bin')) {
    const res = spawnSync('chmod', ['+x', filePath], { stdio: 'inherit' });
    if (res.status === 0) {
      log(`[unpack] Marked ${filePath} as executable (self-extract expected)`);
    } else {
      warn(`[unpack] Could not mark ${filePath} executable`);
    }
  } else {
    warn(`[unpack] Skipping unsupported archive type for ${filePath}`);
  }
}

async function main() {
  if (email === 'contact@stella-ops.org' && password === 'Hoko33JD3nj3aJD.') {
    warn('Using default demo credentials; set CRYPTOPRO_EMAIL/CRYPTOPRO_PASSWORD to real customer creds.');
  }

  const browser = await chromium.launch({ headless: true });
  const context = await browser.newContext({
    acceptDownloads: true,
    httpCredentials: { username: email, password }
  });
  const page = await context.newPage();
  log(`[nav] Opening ${url}`);
  try {
    await page.goto(url, { waitUntil: 'networkidle', timeout: navTimeout });
  } catch (err) {
    warn(`[nav] Navigation at networkidle failed (${err.message}); retrying with waitUntil=load`);
    await page.goto(url, { waitUntil: 'load', timeout: navTimeout });
  }
  log(`[nav] Landed on ${page.url()}`);
  await maybeLogin(page);
  await page.waitForTimeout(2000);

  const loginGate =
    page.url().includes('/user') ||
    (await page.$('form#user-login, form[id*="user-login"], .captcha, #captcha-container'));
  if (loginGate) {
    warn('[auth] Login/captcha gate detected on downloads page; automated fetch blocked. Provide session/cookies or run headful to solve manually.');
    await browser.close();
    return 2;
  }

  let links = await findLinuxLinks(page);
  if (links.length === 0) {
    await page.waitForTimeout(1500);
    await page.evaluate(() => window.scrollTo(0, document.body.scrollHeight));
    await page.waitForTimeout(2000);
    links = await findLinuxLinks(page);
  }
  if (links.length === 0) {
    if (debugLinks) {
      const targetDir = outputFile ? path.dirname(outputFile) : outputDir;
      await fs.promises.mkdir(targetDir, { recursive: true });
      const debugHtml = path.join(targetDir, 'cryptopro-download-page.html');
      await fs.promises.writeFile(debugHtml, await page.content(), 'utf8');
      log(`[debug] Saved page HTML to ${debugHtml}`);
      const allLinks = await page.$$eval('a[href], [data-href], [data-url]', (els) =>
        els
          .map((el) => el.getAttribute('href') || el.getAttribute('data-href') || el.getAttribute('data-url'))
          .filter((href) => typeof href === 'string')
      );
      log(`[debug] Total link-like attributes: ${allLinks.length}`);
      allLinks.slice(0, 20).forEach((href, idx) => log(` [all ${idx + 1}] ${href}`));
    }
    warn('No Linux download links found on page.');
    await browser.close();
    return 1;
  }

  log(`[scan] Found ${links.length} Linux candidate links`);
  links.slice(0, 10).forEach((href, idx) => log(` [${idx + 1}] ${href}`));

  if (dryRun) {
    log('[mode] Dry-run enabled; not downloading. Set CRYPTOPRO_DRY_RUN=0 to fetch.');
    await browser.close();
    return 0;
  }

  const target = links[0];
  log(`[download] Fetching ${target}`);
  const [download] = await Promise.all([
    page.waitForEvent('download', { timeout: 30000 }),
    page.goto(target).catch(() => page.click(`a[href="${target}"]`).catch(() => {}))
  ]);

  const targetDir = outputFile ? path.dirname(outputFile) : outputDir;
  await fs.promises.mkdir(targetDir, { recursive: true });
  const suggested = download.suggestedFilename();
  const outPath = outputFile ? outputFile : path.join(outputDir, suggested);
  await download.saveAs(outPath);
  log(`[download] Saved to ${outPath}`);

  unpackIfSupported(outPath);

  await browser.close();
  return 0;
}

main()
  .then((code) => process.exit(code))
  .catch((err) => {
    console.error(err);
    process.exit(1);
  });
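A hedged invocation sketch for the downloader above (assumes Node and the `playwright-chromium` package are installed; the credentials and output directory shown are placeholders):

```bash
# Dry run first: list candidate Linux packages without downloading anything.
CRYPTOPRO_DRY_RUN=1 CRYPTOPRO_DEBUG=1 \
  node deploy/tools/security/crypto/download-cryptopro-playwright.cjs

# Real fetch with customer credentials, unpacking the archive into the output dir.
CRYPTOPRO_DRY_RUN=0 CRYPTOPRO_UNPACK=1 \
  CRYPTOPRO_EMAIL=you@example.com CRYPTOPRO_PASSWORD='***' \
  CRYPTOPRO_OUTPUT_DIR=/opt/cryptopro/downloads \
  node deploy/tools/security/crypto/download-cryptopro-playwright.cjs
```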
69
deploy/tools/security/crypto/package-rootpack-ru.sh
Normal file
69
deploy/tools/security/crypto/package-rootpack-ru.sh
Normal file
@@ -0,0 +1,69 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(git rev-parse --show-toplevel)"
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
OUTPUT_ROOT="${1:-${ROOT_DIR}/build/rootpack_ru_${TIMESTAMP}}"
ARTIFACT_DIR="${OUTPUT_ROOT}/artifacts"
DOC_DIR="${OUTPUT_ROOT}/docs"
CONFIG_DIR="${OUTPUT_ROOT}/config"
TRUST_DIR="${OUTPUT_ROOT}/trust"

mkdir -p "$ARTIFACT_DIR" "$DOC_DIR" "$CONFIG_DIR" "$TRUST_DIR"

publish_plugin() {
  local project="$1"
  local name="$2"
  local publish_dir="${ARTIFACT_DIR}/${name}"
  echo "[rootpack-ru] Publishing ${project} -> ${publish_dir}"
  dotnet publish "$project" -c Release -o "$publish_dir" --nologo >/dev/null
}

publish_plugin "src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro/StellaOps.Cryptography.Plugin.CryptoPro.csproj" "StellaOps.Cryptography.Plugin.CryptoPro"
publish_plugin "src/__Libraries/StellaOps.Cryptography.Plugin.Pkcs11Gost/StellaOps.Cryptography.Plugin.Pkcs11Gost.csproj" "StellaOps.Cryptography.Plugin.Pkcs11Gost"

cp docs/security/rootpack_ru_validation.md "$DOC_DIR/"
cp docs/security/crypto-routing-audit-2025-11-07.md "$DOC_DIR/"
cp docs/security/rootpack_ru_package.md "$DOC_DIR/"
cp etc/rootpack/ru/crypto.profile.yaml "$CONFIG_DIR/rootpack_ru.crypto.yaml"

if [ "${INCLUDE_GOST_VALIDATION:-1}" != "0" ]; then
  candidate="${OPENSSL_GOST_LOG_DIR:-}"
  if [ -z "$candidate" ]; then
    candidate="$(ls -d "${ROOT_DIR}"/logs/openssl_gost_validation_* "${ROOT_DIR}"/logs/rootpack_ru_*/openssl_gost 2>/dev/null | sort | tail -n 1 || true)"
  fi

  if [ -n "$candidate" ] && [ -d "$candidate" ]; then
    mkdir -p "${DOC_DIR}/gost-validation"
    cp -r "$candidate" "${DOC_DIR}/gost-validation/latest"
  fi
fi

shopt -s nullglob
for pem in "$ROOT_DIR"/certificates/russian_trusted_*; do
  cp "$pem" "$TRUST_DIR/"
done
shopt -u nullglob

cat <<README >"${OUTPUT_ROOT}/README.txt"
RootPack_RU bundle (${TIMESTAMP})
--------------------------------
Contents:
- artifacts/ : Sovereign crypto plug-ins published for net10.0 (CryptoPro + PKCS#11)
- config/rootpack_ru.crypto.yaml : example configuration binding registry profiles
- docs/ : validation + audit documentation
- trust/ : Russian trust anchor PEM bundle copied from certificates/

Usage:
1. Review docs/rootpack_ru_package.md for installation steps.
2. Execute scripts/crypto/run-rootpack-ru-tests.sh (or CI equivalent) and attach the logs to this bundle.
3. Record hardware validation outputs per docs/rootpack_ru_validation.md and store alongside this directory.
README

if [[ "${PACKAGE_TAR:-1}" != "0" ]]; then
  tarball="${OUTPUT_ROOT}.tar.gz"
  echo "[rootpack-ru] Creating ${tarball}"
  tar -czf "$tarball" -C "$(dirname "$OUTPUT_ROOT")" "$(basename "$OUTPUT_ROOT")"
fi

echo "[rootpack-ru] Bundle staged under $OUTPUT_ROOT"
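A short usage sketch for the packaging script above (the custom output directory name is illustrative; both environment knobs come from the script itself):

```bash
# Default invocation: publish plug-ins, copy docs/config/trust anchors, and emit build/rootpack_ru_<ts>.tar.gz.
deploy/tools/security/crypto/package-rootpack-ru.sh

# Stage into a custom directory and skip the tarball.
PACKAGE_TAR=0 deploy/tools/security/crypto/package-rootpack-ru.sh build/rootpack_ru_preview
```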
25
deploy/tools/security/crypto/run-cryptopro-tests.ps1
Normal file
25
deploy/tools/security/crypto/run-cryptopro-tests.ps1
Normal file
@@ -0,0 +1,25 @@
param(
    [string]$Configuration = "Release"
)

if (-not $IsWindows) {
    Write-Host "CryptoPro tests require Windows" -ForegroundColor Yellow
    exit 0
}

if (-not (Get-Command dotnet -ErrorAction SilentlyContinue)) {
    Write-Host "dotnet SDK not found" -ForegroundColor Red
    exit 1
}

# Opt-in flag to avoid accidental runs on agents without CryptoPro CSP installed
$env:STELLAOPS_CRYPTO_PRO_ENABLED = "1"

Write-Host "Running CryptoPro-only tests..." -ForegroundColor Cyan

pushd $PSScriptRoot\..\..
try {
    dotnet test src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj -c $Configuration --filter CryptoProGostSignerTests
} finally {
    popd
}
96
deploy/tools/security/crypto/run-rootpack-ru-tests.sh
Normal file
96
deploy/tools/security/crypto/run-rootpack-ru-tests.sh
Normal file
@@ -0,0 +1,96 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(git rev-parse --show-toplevel)"
DEFAULT_LOG_ROOT="${ROOT_DIR}/logs/rootpack_ru_$(date -u +%Y%m%dT%H%M%SZ)"
LOG_ROOT="${ROOTPACK_LOG_DIR:-$DEFAULT_LOG_ROOT}"
ALLOW_PARTIAL="${ALLOW_PARTIAL:-1}"
mkdir -p "$LOG_ROOT"

PROJECTS=(
  "src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj"
  "src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj"
  "src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj"
)
if [ "${RUN_SCANNER:-1}" != "1" ]; then
  PROJECTS=("${PROJECTS[0]}")
  echo "[rootpack-ru] RUN_SCANNER=0 set; skipping scanner test suites"
fi

run_test() {
  local project="$1"
  local extra_props=""

  if [ "${STELLAOPS_ENABLE_CRYPTO_PRO:-""}" = "1" ]; then
    extra_props+=" /p:StellaOpsEnableCryptoPro=true"
  fi

  if [ "${STELLAOPS_ENABLE_PKCS11:-""}" = "1" ]; then
    extra_props+=" /p:StellaOpsEnablePkcs11=true"
  fi
  local safe_name
  safe_name="$(basename "${project%.csproj}")"
  local log_file="${LOG_ROOT}/${safe_name}.log"
  local trx_name="${safe_name}.trx"

  echo "[rootpack-ru] Running tests for ${project}" | tee "$log_file"
  dotnet test "$project" \
    --nologo \
    --verbosity minimal \
    --results-directory "$LOG_ROOT" \
    --logger "trx;LogFileName=${trx_name}" ${extra_props} | tee -a "$log_file"
}

PROJECT_SUMMARY=()
for project in "${PROJECTS[@]}"; do
  safe_name="$(basename "${project%.csproj}")"
  if run_test "$project"; then
    PROJECT_SUMMARY+=("$project|$safe_name|PASS")
    echo "[rootpack-ru] Wrote logs for ${project} -> ${LOG_ROOT}/${safe_name}.log"
  else
    PROJECT_SUMMARY+=("$project|$safe_name|FAIL")
    echo "[rootpack-ru] Test run failed for ${project}; see ${LOG_ROOT}/${safe_name}.log"
    if [ "${ALLOW_PARTIAL}" != "1" ]; then
      echo "[rootpack-ru] ALLOW_PARTIAL=0; aborting harness."
      exit 1
    fi
  fi
done

GOST_SUMMARY="skipped (docker not available)"
if [ "${RUN_GOST_VALIDATION:-1}" = "1" ]; then
  if command -v docker >/dev/null 2>&1; then
    echo "[rootpack-ru] Running OpenSSL GOST validation harness"
    OPENSSL_GOST_LOG_DIR="${LOG_ROOT}/openssl_gost"
    if OPENSSL_GOST_LOG_DIR="${OPENSSL_GOST_LOG_DIR}" bash "${ROOT_DIR}/scripts/crypto/validate-openssl-gost.sh"; then
      if [ -d "${OPENSSL_GOST_LOG_DIR}" ] && [ -f "${OPENSSL_GOST_LOG_DIR}/summary.txt" ]; then
        GOST_SUMMARY="$(cat "${OPENSSL_GOST_LOG_DIR}/summary.txt")"
      else
        GOST_SUMMARY="completed (see logs/openssl_gost_validation_*)"
      fi
    else
      GOST_SUMMARY="failed (see logs/openssl_gost_validation_*)"
    fi
  else
    echo "[rootpack-ru] Docker not available; skipping OpenSSL GOST validation."
  fi
fi

{
  echo "RootPack_RU deterministic test harness"
  echo "Generated: $(date -u +%Y-%m-%dT%H:%M:%SZ)"
  echo "Log Directory: $LOG_ROOT"
  echo ""
  echo "Projects:"
  for entry in "${PROJECT_SUMMARY[@]}"; do
    project_path="${entry%%|*}"
    rest="${entry#*|}"
    safe_name="${rest%%|*}"
    status="${rest##*|}"
    printf ' - %s (log: %s.log, trx: %s.trx) [%s]\n' "$project_path" "$safe_name" "$safe_name" "$status"
  done
  echo ""
  echo "GOST validation: ${GOST_SUMMARY}"
} > "$LOG_ROOT/README.tests"

echo "Logs and TRX files available under $LOG_ROOT"
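A hedged example of driving the harness above with its documented environment switches (directory names are illustrative):

```bash
# Crypto-only run that fails fast on the first broken suite and skips the Docker-based GOST check.
RUN_SCANNER=0 RUN_GOST_VALIDATION=0 ALLOW_PARTIAL=0 \
  deploy/tools/security/crypto/run-rootpack-ru-tests.sh

# Pin the log directory so CI can attach it to the RootPack_RU bundle.
ROOTPACK_LOG_DIR=logs/rootpack_ru_ci deploy/tools/security/crypto/run-rootpack-ru-tests.sh
```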
42
deploy/tools/security/crypto/run-sim-smoke.ps1
Normal file
42
deploy/tools/security/crypto/run-sim-smoke.ps1
Normal file
@@ -0,0 +1,42 @@
param(
    [string] $BaseUrl = "http://localhost:5000",
    [string] $SimProfile = "sm"
)

$ErrorActionPreference = "Stop"
$repoRoot = Resolve-Path "$PSScriptRoot/../.."

Push-Location $repoRoot
$job = $null
try {
    Write-Host "Building sim service and smoke harness..."
    dotnet build ops/crypto/sim-crypto-service/SimCryptoService.csproj -c Release | Out-Host
    dotnet build ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj -c Release | Out-Host

    Write-Host "Starting sim service at $BaseUrl ..."
    $job = Start-Job -ArgumentList $repoRoot, $BaseUrl -ScriptBlock {
        param($path, $url)
        Set-Location $path
        $env:ASPNETCORE_URLS = $url
        dotnet run --project ops/crypto/sim-crypto-service/SimCryptoService.csproj --no-build -c Release
    }

    Start-Sleep -Seconds 6

    $env:STELLAOPS_CRYPTO_SIM_URL = $BaseUrl
    $env:SIM_PROFILE = $SimProfile
    Write-Host "Running smoke harness (profile=$SimProfile, url=$BaseUrl)..."
    dotnet run --project ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj --no-build -c Release
    $exitCode = $LASTEXITCODE
    if ($exitCode -ne 0) {
        throw "Smoke harness failed with exit code $exitCode"
    }
}
finally {
    if ($job) {
        Stop-Job $job -ErrorAction SilentlyContinue | Out-Null
        Receive-Job $job -ErrorAction SilentlyContinue | Out-Null
        Remove-Job $job -ErrorAction SilentlyContinue | Out-Null
    }
    Pop-Location
}
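A possible invocation of the smoke harness above from a shell with PowerShell 7+ installed (the port is illustrative; `sm` is the script's default profile):

```bash
pwsh deploy/tools/security/crypto/run-sim-smoke.ps1 -BaseUrl http://localhost:5080 -SimProfile sm
```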
108
deploy/tools/security/crypto/validate-openssl-gost.sh
Normal file
108
deploy/tools/security/crypto/validate-openssl-gost.sh
Normal file
@@ -0,0 +1,108 @@
#!/usr/bin/env bash
set -euo pipefail

if ! command -v docker >/dev/null 2>&1; then
  echo "[gost-validate] docker is required but not found on PATH" >&2
  exit 1
fi

ROOT_DIR="$(git rev-parse --show-toplevel)"
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
LOG_ROOT="${OPENSSL_GOST_LOG_DIR:-${ROOT_DIR}/logs/openssl_gost_validation_${TIMESTAMP}}"
IMAGE="${OPENSSL_GOST_IMAGE:-rnix/openssl-gost:latest}"
MOUNT_PATH="${LOG_ROOT}"

UNAME_OUT="$(uname -s || true)"
case "${UNAME_OUT}" in
  MINGW*|MSYS*|CYGWIN*)
    if command -v wslpath >/dev/null 2>&1; then
      # Docker Desktop on Windows prefers Windows-style mount paths.
      MOUNT_PATH="$(wslpath -m "${LOG_ROOT}")"
    fi
    ;;
  *)
    MOUNT_PATH="${LOG_ROOT}"
    ;;
esac

mkdir -p "${LOG_ROOT}"

cat >"${LOG_ROOT}/message.txt" <<'EOF'
StellaOps OpenSSL GOST validation message (md_gost12_256)
EOF

echo "[gost-validate] Using image ${IMAGE}"
docker pull "${IMAGE}" >/dev/null

CONTAINER_SCRIPT_PATH="${LOG_ROOT}/container-script.sh"

cat > "${CONTAINER_SCRIPT_PATH}" <<'CONTAINER_SCRIPT'
set -eu

MESSAGE="/out/message.txt"

openssl version -a > /out/openssl-version.txt
openssl engine -c > /out/engine-list.txt

openssl genpkey -engine gost -algorithm gost2012_256 -pkeyopt paramset:A -out /tmp/gost.key.pem >/dev/null
openssl pkey -engine gost -in /tmp/gost.key.pem -pubout -out /out/gost.pub.pem >/dev/null

DIGEST_LINE="$(openssl dgst -engine gost -md_gost12_256 "${MESSAGE}")"
echo "${DIGEST_LINE}" > /out/digest.txt
DIGEST="$(printf "%s" "${DIGEST_LINE}" | awk -F'= ' '{print $2}')"

openssl dgst -engine gost -md_gost12_256 -sign /tmp/gost.key.pem -out /tmp/signature1.bin "${MESSAGE}"
openssl dgst -engine gost -md_gost12_256 -sign /tmp/gost.key.pem -out /tmp/signature2.bin "${MESSAGE}"

openssl dgst -engine gost -md_gost12_256 -verify /out/gost.pub.pem -signature /tmp/signature1.bin "${MESSAGE}" > /out/verify1.txt
openssl dgst -engine gost -md_gost12_256 -verify /out/gost.pub.pem -signature /tmp/signature2.bin "${MESSAGE}" > /out/verify2.txt

SIG1_SHA="$(sha256sum /tmp/signature1.bin | awk '{print $1}')"
SIG2_SHA="$(sha256sum /tmp/signature2.bin | awk '{print $1}')"
MSG_SHA="$(sha256sum "${MESSAGE}" | awk '{print $1}')"

cp /tmp/signature1.bin /out/signature1.bin
cp /tmp/signature2.bin /out/signature2.bin

DETERMINISTIC_BOOL=false
DETERMINISTIC_LABEL="no"
if [ "${SIG1_SHA}" = "${SIG2_SHA}" ]; then
  DETERMINISTIC_BOOL=true
  DETERMINISTIC_LABEL="yes"
fi

cat > /out/summary.txt <<SUMMARY
OpenSSL GOST validation (Linux engine)
Image: ${VALIDATION_IMAGE:-unknown}
Digest algorithm: md_gost12_256
Message SHA256: ${MSG_SHA}
Digest: ${DIGEST}
Signature1 SHA256: ${SIG1_SHA}
Signature2 SHA256: ${SIG2_SHA}
Signatures deterministic: ${DETERMINISTIC_LABEL}
SUMMARY

cat > /out/summary.json <<SUMMARYJSON
{
  "image": "${VALIDATION_IMAGE:-unknown}",
  "digest_algorithm": "md_gost12_256",
  "message_sha256": "${MSG_SHA}",
  "digest": "${DIGEST}",
  "signature1_sha256": "${SIG1_SHA}",
  "signature2_sha256": "${SIG2_SHA}",
  "signatures_deterministic": ${DETERMINISTIC_BOOL}
}
SUMMARYJSON

CONTAINER_SCRIPT

docker run --rm \
  -e VALIDATION_IMAGE="${IMAGE}" \
  -v "${MOUNT_PATH}:/out" \
  "${IMAGE}" /bin/sh "/out/$(basename "${CONTAINER_SCRIPT_PATH}")"

rm -f "${CONTAINER_SCRIPT_PATH}"

echo "[gost-validate] Artifacts written to ${LOG_ROOT}"
echo "[gost-validate] Summary:"
cat "${LOG_ROOT}/summary.txt"
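A hedged example of running the validation harness above with its documented overrides (the mirrored registry name and log directory are placeholders):

```bash
OPENSSL_GOST_IMAGE=registry.local/openssl-gost:latest \
  OPENSSL_GOST_LOG_DIR=logs/openssl_gost_manual \
  deploy/tools/security/crypto/validate-openssl-gost.sh

# Inspect the machine-readable result.
cat logs/openssl_gost_manual/summary.json
```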
130
deploy/tools/validation/check-channel-alignment.py
Normal file
130
deploy/tools/validation/check-channel-alignment.py
Normal file
@@ -0,0 +1,130 @@
#!/usr/bin/env python3
"""
Ensure deployment bundles reference the images defined in a release manifest.

Usage:
  ./deploy/tools/validation/check-channel-alignment.py \
    --release deploy/releases/2025.10-edge.yaml \
    --target deploy/helm/stellaops/values-dev.yaml \
    --target deploy/compose/docker-compose.dev.yaml

For every target file, the script scans `image:` declarations and verifies that
any image belonging to a repository listed in the release manifest matches the
exact digest or tag recorded there. Images outside of the manifest (for example,
supporting services such as `valkey`) are ignored.
"""

from __future__ import annotations

import argparse
import pathlib
import re
import sys
from typing import Dict, Iterable, List, Optional, Set

IMAGE_LINE = re.compile(r"^\s*image:\s*['\"]?(?P<image>\S+)['\"]?\s*$")


def extract_images(path: pathlib.Path) -> List[str]:
    images: List[str] = []
    for line in path.read_text(encoding="utf-8").splitlines():
        match = IMAGE_LINE.match(line)
        if match:
            images.append(match.group("image"))
    return images


def image_repo(image: str) -> str:
    if "@" in image:
        return image.split("@", 1)[0]
    # Split on the last colon to preserve registries with ports (e.g. localhost:5000)
    if ":" in image:
        prefix, tag = image.rsplit(":", 1)
        if "/" in tag:
            # the colon belongs to the registry host:port, not a tag separator (unlikely)
            return image
        return prefix
    return image


def load_release_map(release_path: pathlib.Path) -> Dict[str, str]:
    release_map: Dict[str, str] = {}
    for image in extract_images(release_path):
        repo = image_repo(image)
        release_map[repo] = image
    return release_map


def check_target(
    target_path: pathlib.Path,
    release_map: Dict[str, str],
    ignore_repos: Set[str],
) -> List[str]:
    errors: List[str] = []
    for image in extract_images(target_path):
        repo = image_repo(image)
        if repo in ignore_repos:
            continue
        if repo not in release_map:
            continue
        expected = release_map[repo]
        if image != expected:
            errors.append(
                f"{target_path}: {image} does not match release value {expected}"
            )
    return errors


def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--release",
        required=True,
        type=pathlib.Path,
        help="Path to the release manifest (YAML)",
    )
    parser.add_argument(
        "--target",
        action="append",
        required=True,
        type=pathlib.Path,
        help="Deployment profile to validate against the release manifest",
    )
    parser.add_argument(
        "--ignore-repo",
        action="append",
        default=[],
        help="Repository prefix to ignore (may be repeated)",
    )
    return parser.parse_args(argv)


def main(argv: Optional[Iterable[str]] = None) -> int:
    args = parse_args(argv)

    release_map = load_release_map(args.release)
    ignore_repos = {repo.rstrip("/") for repo in args.ignore_repo}

    if not release_map:
        print(f"error: no images found in release manifest {args.release}", file=sys.stderr)
        return 2

    total_errors: List[str] = []
    for target in args.target:
        if not target.exists():
            total_errors.append(f"{target}: file not found")
            continue
        total_errors.extend(check_target(target, release_map, ignore_repos))

    if total_errors:
        print("✖ channel alignment check failed:", file=sys.stderr)
        for err in total_errors:
            print(f" - {err}", file=sys.stderr)
        return 1

    print("✓ deployment profiles reference release images for the inspected repositories.")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
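A hedged example of running the alignment check above against the stage and prod profiles (the manifest path comes from the docstring; the `--ignore-repo` value is illustrative):

```bash
deploy/tools/validation/check-channel-alignment.py \
  --release deploy/releases/2025.10-edge.yaml \
  --target deploy/compose/docker-compose.stage.yaml \
  --target deploy/helm/stellaops/values-prod.yaml \
  --ignore-repo valkey
```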
61
deploy/tools/validation/validate-profiles.sh
Normal file
61
deploy/tools/validation/validate-profiles.sh
Normal file
@@ -0,0 +1,61 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
COMPOSE_DIR="$ROOT_DIR/compose"
HELM_DIR="$ROOT_DIR/helm/stellaops"

compose_profiles=(
  "docker-compose.dev.yaml:env/dev.env.example"
  "docker-compose.stage.yaml:env/stage.env.example"
  "docker-compose.prod.yaml:env/prod.env.example"
  "docker-compose.airgap.yaml:env/airgap.env.example"
  "docker-compose.mirror.yaml:env/mirror.env.example"
  "docker-compose.telemetry.yaml:"
  "docker-compose.telemetry-storage.yaml:"
)

docker_ready=false
if command -v docker >/dev/null 2>&1; then
  if docker compose version >/dev/null 2>&1; then
    docker_ready=true
  else
    echo "⚠️ docker CLI present but Compose plugin unavailable; skipping compose validation" >&2
  fi
else
  echo "⚠️ docker CLI not found; skipping compose validation" >&2
fi

if [[ "$docker_ready" == "true" ]]; then
  for entry in "${compose_profiles[@]}"; do
    IFS=":" read -r compose_file env_file <<<"$entry"
    printf '→ validating %s with %s\n' "$compose_file" "$env_file"
    if [[ -n "$env_file" ]]; then
      docker compose \
        --env-file "$COMPOSE_DIR/$env_file" \
        -f "$COMPOSE_DIR/$compose_file" config >/dev/null
    else
      docker compose -f "$COMPOSE_DIR/$compose_file" config >/dev/null
    fi
  done
fi

helm_values=(
  "$HELM_DIR/values-dev.yaml"
  "$HELM_DIR/values-stage.yaml"
  "$HELM_DIR/values-prod.yaml"
  "$HELM_DIR/values-airgap.yaml"
  "$HELM_DIR/values-mirror.yaml"
)

if command -v helm >/dev/null 2>&1; then
  for values in "${helm_values[@]}"; do
    printf '→ linting Helm chart with %s\n' "$(basename "$values")"
    helm lint "$HELM_DIR" -f "$values"
    helm template test-release "$HELM_DIR" -f "$values" >/dev/null
  done
else
  echo "⚠️ helm CLI not found; skipping Helm lint/template" >&2
fi

printf 'Profiles validated (where tooling was available).\n'
183
deploy/tools/validation/validate_restore_sources.py
Normal file
183
deploy/tools/validation/validate_restore_sources.py
Normal file
@@ -0,0 +1,183 @@
#!/usr/bin/env python3

"""
Validate NuGet source ordering for StellaOps.

Ensures `local-nuget` is the highest priority feed in both NuGet.config and the
Directory.Build.props restore configuration. Fails fast with actionable errors
so CI/offline kit workflows can assert deterministic restore ordering.
"""

from __future__ import annotations

import argparse
import subprocess
import sys
import xml.etree.ElementTree as ET
from pathlib import Path


REPO_ROOT = Path(__file__).resolve().parents[2]
NUGET_CONFIG = REPO_ROOT / "NuGet.config"
ROOT_PROPS = REPO_ROOT / "Directory.Build.props"
EXPECTED_SOURCE_KEYS = ["local", "dotnet-public", "nuget.org"]


class ValidationError(Exception):
    """Raised when validation fails."""


def _fail(message: str) -> None:
    raise ValidationError(message)


def _parse_xml(path: Path) -> ET.ElementTree:
    try:
        return ET.parse(path)
    except FileNotFoundError as exc:
        _fail(f"Missing required file: {path}")
    except ET.ParseError as exc:
        _fail(f"Could not parse XML for {path}: {exc}")


def validate_nuget_config() -> None:
    tree = _parse_xml(NUGET_CONFIG)
    root = tree.getroot()

    package_sources = root.find("packageSources")
    if package_sources is None:
        _fail("NuGet.config must declare a <packageSources> section.")

    children = list(package_sources)
    if not children or children[0].tag != "clear":
        _fail("NuGet.config packageSources must begin with a <clear /> element.")

    adds = [child for child in children if child.tag == "add"]
    if not adds:
        _fail("NuGet.config packageSources must define at least one <add> entry.")

    keys = [add.attrib.get("key") for add in adds]
    if keys[: len(EXPECTED_SOURCE_KEYS)] != EXPECTED_SOURCE_KEYS:
        formatted = ", ".join(keys) or "<empty>"
        _fail(
            "NuGet.config packageSources must list feeds in the order "
            f"{EXPECTED_SOURCE_KEYS}. Found: {formatted}"
        )

    local_value = adds[0].attrib.get("value", "")
    if Path(local_value).name != "local-nuget":
        _fail(
            "NuGet.config local feed should point at the repo-local mirror "
            f"'local-nuget', found value '{local_value}'."
        )

    clear = package_sources.find("clear")
    if clear is None:
        _fail("NuGet.config packageSources must start with <clear /> to avoid inherited feeds.")


def validate_directory_build_props() -> None:
    tree = _parse_xml(ROOT_PROPS)
    root = tree.getroot()
    defaults = None
    for element in root.findall(".//_StellaOpsDefaultRestoreSources"):
        defaults = [fragment.strip() for fragment in element.text.split(";") if fragment.strip()]
        break

    if defaults is None:
        _fail("Directory.Build.props must define _StellaOpsDefaultRestoreSources.")

    expected_props = [
        "$(StellaOpsLocalNuGetSource)",
        "$(StellaOpsDotNetPublicSource)",
        "$(StellaOpsNuGetOrgSource)",
    ]
    if defaults != expected_props:
        _fail(
            "Directory.Build.props _StellaOpsDefaultRestoreSources must list feeds "
            f"in the order {expected_props}. Found: {defaults}"
        )

    restore_nodes = root.findall(".//RestoreSources")
    if not restore_nodes:
        _fail("Directory.Build.props must override RestoreSources to force deterministic ordering.")

    uses_default_first = any(
        node.text
        and node.text.strip().startswith("$(_StellaOpsDefaultRestoreSources)")
        for node in restore_nodes
    )
    if not uses_default_first:
        _fail(
            "Directory.Build.props RestoreSources override must place "
            "$(_StellaOpsDefaultRestoreSources) at the beginning."
        )


def assert_single_nuget_config() -> None:
    extra_configs: list[Path] = []
    configs: set[Path] = set()
    for glob in ("NuGet.config", "nuget.config"):
        try:
            result = subprocess.run(
                ["rg", "--files", f"-g{glob}"],
                check=False,
                capture_output=True,
                text=True,
                cwd=REPO_ROOT,
            )
        except FileNotFoundError as exc:
            _fail("ripgrep (rg) is required for validation but was not found on PATH.")
        if result.returncode not in (0, 1):
            _fail(
                f"ripgrep failed while searching for {glob}: {result.stderr.strip() or result.returncode}"
            )
        for line in result.stdout.splitlines():
            configs.add((REPO_ROOT / line).resolve())

    configs.discard(NUGET_CONFIG.resolve())
    extra_configs.extend(sorted(configs))
    if extra_configs:
        formatted = "\n ".join(str(path.relative_to(REPO_ROOT)) for path in extra_configs)
        _fail(
            "Unexpected additional NuGet.config files detected. "
            "Consolidate feed configuration in the repo root:\n "
            f"{formatted}"
        )


def parse_args(argv: list[str]) -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        description="Verify StellaOps NuGet feeds prioritise the local mirror."
    )
    parser.add_argument(
        "--skip-rg",
        action="store_true",
        help="Skip ripgrep discovery of extra NuGet.config files (useful for focused runs).",
    )
    return parser.parse_args(argv)


def main(argv: list[str]) -> int:
    args = parse_args(argv)
    validations = [
        ("NuGet.config ordering", validate_nuget_config),
        ("Directory.Build.props restore override", validate_directory_build_props),
    ]
    if not args.skip_rg:
        validations.append(("single NuGet.config", assert_single_nuget_config))

    for label, check in validations:
        try:
            check()
        except ValidationError as exc:
            sys.stderr.write(f"[FAIL] {label}: {exc}\n")
            return 1
        else:
            sys.stdout.write(f"[OK] {label}\n")

    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
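A short usage sketch for the restore-source validator above (flags come from its argparse definition):

```bash
# Full validation, including the ripgrep sweep for stray NuGet.config files.
python3 deploy/tools/validation/validate_restore_sources.py

# Focused run without the ripgrep discovery step.
python3 deploy/tools/validation/validate_restore_sources.py --skip-rg
```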