220
scripts/crypto/download-cryptopro-playwright.cjs
Normal file
@@ -0,0 +1,220 @@
#!/usr/bin/env node
/**
 * CryptoPro CSP downloader (Playwright-driven).
 *
 * Navigates the cryptopro.ru downloads page, optionally fills the login form, and selects
 * Linux packages (.rpm/.deb/.tar.gz/.tgz/.bin) under the CSP Linux section.
 *
 * Environment:
 * - CRYPTOPRO_URL (default: https://cryptopro.ru/products/csp/downloads#latest_csp50r3_linux)
 * - CRYPTOPRO_EMAIL / CRYPTOPRO_PASSWORD (default demo creds: contact@stella-ops.org / Hoko33JD3nj3aJD.)
 * - CRYPTOPRO_DRY_RUN (default: 1) -> list candidates, do not download
 * - CRYPTOPRO_OUTPUT_DIR (default: /opt/cryptopro/downloads)
 * - CRYPTOPRO_OUTPUT_FILE (optional: force a specific output filename/path)
 * - CRYPTOPRO_UNPACK (default: 0) -> attempt to unpack tar.gz/tgz/rpm/deb
 * - CRYPTOPRO_NAV_TIMEOUT (default: 60000) -> navigation timeout in milliseconds
 * - CRYPTOPRO_DEBUG (default: 0) -> dump page HTML and link-like attributes when no match is found
 */
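// Example invocations (illustrative; the output path is an assumption, not a shipped default):
//   CRYPTOPRO_DRY_RUN=1 node scripts/crypto/download-cryptopro-playwright.cjs
//   CRYPTOPRO_DRY_RUN=0 CRYPTOPRO_OUTPUT_DIR=/tmp/cryptopro CRYPTOPRO_UNPACK=1 \
//     node scripts/crypto/download-cryptopro-playwright.cjs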

const path = require('path');
const fs = require('fs');
const { spawnSync } = require('child_process');
const { chromium } = require('playwright-chromium');

const url = process.env.CRYPTOPRO_URL || 'https://cryptopro.ru/products/csp/downloads#latest_csp50r3_linux';
const email = process.env.CRYPTOPRO_EMAIL || 'contact@stella-ops.org';
const password = process.env.CRYPTOPRO_PASSWORD || 'Hoko33JD3nj3aJD.';
const dryRun = (process.env.CRYPTOPRO_DRY_RUN || '1') !== '0';
const outputDir = process.env.CRYPTOPRO_OUTPUT_DIR || '/opt/cryptopro/downloads';
const outputFile = process.env.CRYPTOPRO_OUTPUT_FILE;
const unpack = (process.env.CRYPTOPRO_UNPACK || '0') === '1';
const navTimeout = parseInt(process.env.CRYPTOPRO_NAV_TIMEOUT || '60000', 10);

const linuxPattern = /\.(rpm|deb|tar\.gz|tgz|bin)(\?|$)/i;
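// Matches e.g. "csp-linux-amd64.tar.gz" or "csp.deb?token=abc"; the (\?|$) tail allows query strings.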
const debugLinks = (process.env.CRYPTOPRO_DEBUG || '0') === '1';

function log(msg) {
  process.stdout.write(`${msg}\n`);
}

function warn(msg) {
  process.stderr.write(`[WARN] ${msg}\n`);
}

async function maybeLogin(page) {
  const emailSelector = 'input[type="email"], input[name*="email" i], input[name*="login" i], input[name="name"]';
  const passwordSelector = 'input[type="password"], input[name*="password" i]';
  const submitSelector = 'button[type="submit"], input[type="submit"]';

  const emailInput = await page.$(emailSelector);
  const passwordInput = await page.$(passwordSelector);
  if (emailInput && passwordInput) {
    log('[login] Form detected; submitting credentials');
    await emailInput.fill(email);
    await passwordInput.fill(password);
    const submit = await page.$(submitSelector);
    if (submit) {
      await Promise.all([
        page.waitForNavigation({ waitUntil: 'networkidle', timeout: 15000 }).catch(() => {}),
        submit.click()
      ]);
    } else {
      await passwordInput.press('Enter');
      await page.waitForTimeout(2000);
    }
  } else {
    log('[login] No login form detected; continuing anonymously');
  }
}

async function findLinuxLinks(page) {
  const targets = [page, ...page.frames()];
  const hrefs = [];

  // Collect href/data-href/data-url across main page + frames
  for (const target of targets) {
    try {
      const collected = await target.$$eval('a[href], [data-href], [data-url]', (els) =>
        els
          .map((el) => el.getAttribute('href') || el.getAttribute('data-href') || el.getAttribute('data-url'))
          .filter((href) => typeof href === 'string')
      );
      hrefs.push(...collected);
    } catch (err) {
      warn(`[scan] Failed to collect links from frame: ${err.message}`);
    }
  }

  const unique = Array.from(new Set(hrefs));
  return unique.filter((href) => linuxPattern.test(href));
}

function unpackIfSupported(filePath) {
  if (!unpack) {
    return;
  }
  const cwd = path.dirname(filePath);
  if (filePath.endsWith('.tar.gz') || filePath.endsWith('.tgz')) {
    const res = spawnSync('tar', ['-xzf', filePath, '-C', cwd], { stdio: 'inherit' });
    if (res.status === 0) {
      log(`[unpack] Extracted ${filePath}`);
    } else {
      warn(`[unpack] Failed to extract ${filePath}`);
    }
  } else if (filePath.endsWith('.rpm')) {
    const res = spawnSync('bash', ['-lc', `rpm2cpio "${filePath}" | cpio -idmv`], { stdio: 'inherit', cwd });
    if (res.status === 0) {
      log(`[unpack] Extracted RPM ${filePath}`);
    } else {
      warn(`[unpack] Failed to extract RPM ${filePath}`);
    }
  } else if (filePath.endsWith('.deb')) {
    const res = spawnSync('dpkg-deb', ['-x', filePath, cwd], { stdio: 'inherit' });
    if (res.status === 0) {
      log(`[unpack] Extracted DEB ${filePath}`);
    } else {
      warn(`[unpack] Failed to extract DEB ${filePath}`);
    }
  } else if (filePath.endsWith('.bin')) {
    const res = spawnSync('chmod', ['+x', filePath], { stdio: 'inherit' });
    if (res.status === 0) {
      log(`[unpack] Marked ${filePath} as executable (self-extract expected)`);
    } else {
      warn(`[unpack] Could not mark ${filePath} executable`);
    }
  } else {
    warn(`[unpack] Skipping unsupported archive type for ${filePath}`);
  }
}

async function main() {
  if (email === 'contact@stella-ops.org' && password === 'Hoko33JD3nj3aJD.') {
    warn('Using default demo credentials; set CRYPTOPRO_EMAIL/CRYPTOPRO_PASSWORD to real customer creds.');
  }

  const browser = await chromium.launch({ headless: true });
  const context = await browser.newContext({
    acceptDownloads: true,
    httpCredentials: { username: email, password }
  });
  const page = await context.newPage();
  log(`[nav] Opening ${url}`);
  try {
    await page.goto(url, { waitUntil: 'networkidle', timeout: navTimeout });
  } catch (err) {
    warn(`[nav] Navigation at networkidle failed (${err.message}); retrying with waitUntil=load`);
    await page.goto(url, { waitUntil: 'load', timeout: navTimeout });
  }
  log(`[nav] Landed on ${page.url()}`);
  await maybeLogin(page);
  await page.waitForTimeout(2000);

  const loginGate =
    page.url().includes('/user') ||
    (await page.$('form#user-login, form[id*="user-login"], .captcha, #captcha-container'));
  if (loginGate) {
    warn('[auth] Login/captcha gate detected on downloads page; automated fetch blocked. Provide session/cookies or run headful to solve manually.');
    await browser.close();
    return 2;
  }

  let links = await findLinuxLinks(page);
  if (links.length === 0) {
    await page.waitForTimeout(1500);
    await page.evaluate(() => window.scrollTo(0, document.body.scrollHeight));
    await page.waitForTimeout(2000);
    links = await findLinuxLinks(page);
  }
  if (links.length === 0) {
    if (debugLinks) {
      const targetDir = outputFile ? path.dirname(outputFile) : outputDir;
      await fs.promises.mkdir(targetDir, { recursive: true });
      const debugHtml = path.join(targetDir, 'cryptopro-download-page.html');
      await fs.promises.writeFile(debugHtml, await page.content(), 'utf8');
      log(`[debug] Saved page HTML to ${debugHtml}`);
      const allLinks = await page.$$eval('a[href], [data-href], [data-url]', (els) =>
        els
          .map((el) => el.getAttribute('href') || el.getAttribute('data-href') || el.getAttribute('data-url'))
          .filter((href) => typeof href === 'string')
      );
      log(`[debug] Total link-like attributes: ${allLinks.length}`);
      allLinks.slice(0, 20).forEach((href, idx) => log(` [all ${idx + 1}] ${href}`));
    }
    warn('No Linux download links found on page.');
    await browser.close();
    return 1;
  }

  log(`[scan] Found ${links.length} Linux candidate links`);
  links.slice(0, 10).forEach((href, idx) => log(` [${idx + 1}] ${href}`));

  if (dryRun) {
    log('[mode] Dry-run enabled; not downloading. Set CRYPTOPRO_DRY_RUN=0 to fetch.');
    await browser.close();
    return 0;
  }

  const target = links[0];
  log(`[download] Fetching ${target}`);
  const [download] = await Promise.all([
    page.waitForEvent('download', { timeout: 30000 }),
    page.goto(target).catch(() => page.click(`a[href="${target}"]`).catch(() => {}))
  ]);

  const targetDir = outputFile ? path.dirname(outputFile) : outputDir;
  await fs.promises.mkdir(targetDir, { recursive: true });
  const suggested = download.suggestedFilename();
  const outPath = outputFile ? outputFile : path.join(outputDir, suggested);
  await download.saveAs(outPath);
  log(`[download] Saved to ${outPath}`);

  unpackIfSupported(outPath);

  await browser.close();
  return 0;
}

main()
  .then((code) => process.exit(code))
  .catch((err) => {
    console.error(err);
    process.exit(1);
  });
@@ -27,6 +27,18 @@ cp docs/security/crypto-routing-audit-2025-11-07.md "$DOC_DIR/"
 cp docs/security/rootpack_ru_package.md "$DOC_DIR/"
 cp etc/rootpack/ru/crypto.profile.yaml "$CONFIG_DIR/rootpack_ru.crypto.yaml"
 
+if [ "${INCLUDE_GOST_VALIDATION:-1}" != "0" ]; then
+  candidate="${OPENSSL_GOST_LOG_DIR:-}"
+  if [ -z "$candidate" ]; then
+    candidate="$(ls -d "${ROOT_DIR}"/logs/openssl_gost_validation_* "${ROOT_DIR}"/logs/rootpack_ru_*/openssl_gost 2>/dev/null | sort | tail -n 1 || true)"
+  fi
+
+  if [ -n "$candidate" ] && [ -d "$candidate" ]; then
+    mkdir -p "${DOC_DIR}/gost-validation"
+    cp -r "$candidate" "${DOC_DIR}/gost-validation/latest"
+  fi
+fi
+
 shopt -s nullglob
 for pem in "$ROOT_DIR"/certificates/russian_trusted_*; do
   cp "$pem" "$TRUST_DIR/"
@@ -4,6 +4,7 @@ set -euo pipefail
 ROOT_DIR="$(git rev-parse --show-toplevel)"
 DEFAULT_LOG_ROOT="${ROOT_DIR}/logs/rootpack_ru_$(date -u +%Y%m%dT%H%M%SZ)"
 LOG_ROOT="${ROOTPACK_LOG_DIR:-$DEFAULT_LOG_ROOT}"
+ALLOW_PARTIAL="${ALLOW_PARTIAL:-1}"
 mkdir -p "$LOG_ROOT"
 
 PROJECTS=(
@@ -11,6 +12,10 @@ PROJECTS=(
   "src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj"
   "src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj"
 )
+if [ "${RUN_SCANNER:-1}" != "1" ]; then
+  PROJECTS=("${PROJECTS[0]}")
+  echo "[rootpack-ru] RUN_SCANNER=0 set; skipping scanner test suites"
+fi
 
 run_test() {
   local project="$1"
@@ -38,11 +43,38 @@ run_test() {
 
 PROJECT_SUMMARY=()
 for project in "${PROJECTS[@]}"; do
-  run_test "$project"
   safe_name="$(basename "${project%.csproj}")"
-  PROJECT_SUMMARY+=("$project|$safe_name")
-  echo "[rootpack-ru] Wrote logs for ${project} -> ${LOG_ROOT}/${safe_name}.log"
-done
+  if run_test "$project"; then
+    PROJECT_SUMMARY+=("$project|$safe_name|PASS")
+    echo "[rootpack-ru] Wrote logs for ${project} -> ${LOG_ROOT}/${safe_name}.log"
+  else
+    PROJECT_SUMMARY+=("$project|$safe_name|FAIL")
+    echo "[rootpack-ru] Test run failed for ${project}; see ${LOG_ROOT}/${safe_name}.log"
+    if [ "${ALLOW_PARTIAL}" != "1" ]; then
+      echo "[rootpack-ru] ALLOW_PARTIAL=0; aborting harness."
+      exit 1
+    fi
+  fi
+done
+
+GOST_SUMMARY="skipped (docker not available)"
+if [ "${RUN_GOST_VALIDATION:-1}" = "1" ]; then
+  if command -v docker >/dev/null 2>&1; then
+    echo "[rootpack-ru] Running OpenSSL GOST validation harness"
+    OPENSSL_GOST_LOG_DIR="${LOG_ROOT}/openssl_gost"
+    if OPENSSL_GOST_LOG_DIR="${OPENSSL_GOST_LOG_DIR}" bash "${ROOT_DIR}/scripts/crypto/validate-openssl-gost.sh"; then
+      if [ -d "${OPENSSL_GOST_LOG_DIR}" ] && [ -f "${OPENSSL_GOST_LOG_DIR}/summary.txt" ]; then
+        GOST_SUMMARY="$(cat "${OPENSSL_GOST_LOG_DIR}/summary.txt")"
+      else
+        GOST_SUMMARY="completed (see logs/openssl_gost_validation_*)"
+      fi
+    else
+      GOST_SUMMARY="failed (see logs/openssl_gost_validation_*)"
+    fi
+  else
+    echo "[rootpack-ru] Docker not available; skipping OpenSSL GOST validation."
+  fi
+fi
 
 {
   echo "RootPack_RU deterministic test harness"
@@ -52,9 +84,13 @@ done
   echo "Projects:"
   for entry in "${PROJECT_SUMMARY[@]}"; do
     project_path="${entry%%|*}"
-    safe_name="${entry##*|}"
-    printf ' - %s (log: %s.log, trx: %s.trx)\n' "$project_path" "$safe_name" "$safe_name"
+    rest="${entry#*|}"
+    safe_name="${rest%%|*}"
+    status="${rest##*|}"
+    printf ' - %s (log: %s.log, trx: %s.trx) [%s]\n' "$project_path" "$safe_name" "$safe_name" "$status"
   done
+  echo ""
+  echo "GOST validation: ${GOST_SUMMARY}"
 } > "$LOG_ROOT/README.tests"
 
 echo "Logs and TRX files available under $LOG_ROOT"
108
scripts/crypto/validate-openssl-gost.sh
Executable file
@@ -0,0 +1,108 @@
#!/usr/bin/env bash
set -euo pipefail
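# Usage sketch (both variables are optional; defaults are set below):
#   OPENSSL_GOST_IMAGE=rnix/openssl-gost:latest OPENSSL_GOST_LOG_DIR=logs/openssl_gost \
#     scripts/crypto/validate-openssl-gost.sh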

if ! command -v docker >/dev/null 2>&1; then
  echo "[gost-validate] docker is required but not found on PATH" >&2
  exit 1
fi

ROOT_DIR="$(git rev-parse --show-toplevel)"
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
LOG_ROOT="${OPENSSL_GOST_LOG_DIR:-${ROOT_DIR}/logs/openssl_gost_validation_${TIMESTAMP}}"
IMAGE="${OPENSSL_GOST_IMAGE:-rnix/openssl-gost:latest}"
MOUNT_PATH="${LOG_ROOT}"

UNAME_OUT="$(uname -s || true)"
case "${UNAME_OUT}" in
  MINGW*|MSYS*|CYGWIN*)
    if command -v wslpath >/dev/null 2>&1; then
      # Docker Desktop on Windows prefers Windows-style mount paths.
      MOUNT_PATH="$(wslpath -m "${LOG_ROOT}")"
    fi
    ;;
  *)
    MOUNT_PATH="${LOG_ROOT}"
    ;;
esac

mkdir -p "${LOG_ROOT}"

cat >"${LOG_ROOT}/message.txt" <<'EOF'
StellaOps OpenSSL GOST validation message (md_gost12_256)
EOF

echo "[gost-validate] Using image ${IMAGE}"
docker pull "${IMAGE}" >/dev/null

CONTAINER_SCRIPT_PATH="${LOG_ROOT}/container-script.sh"

cat > "${CONTAINER_SCRIPT_PATH}" <<'CONTAINER_SCRIPT'
set -eu

MESSAGE="/out/message.txt"

openssl version -a > /out/openssl-version.txt
openssl engine -c > /out/engine-list.txt

openssl genpkey -engine gost -algorithm gost2012_256 -pkeyopt paramset:A -out /tmp/gost.key.pem >/dev/null
openssl pkey -engine gost -in /tmp/gost.key.pem -pubout -out /out/gost.pub.pem >/dev/null

DIGEST_LINE="$(openssl dgst -engine gost -md_gost12_256 "${MESSAGE}")"
echo "${DIGEST_LINE}" > /out/digest.txt
DIGEST="$(printf "%s" "${DIGEST_LINE}" | awk -F'= ' '{print $2}')"

openssl dgst -engine gost -md_gost12_256 -sign /tmp/gost.key.pem -out /tmp/signature1.bin "${MESSAGE}"
openssl dgst -engine gost -md_gost12_256 -sign /tmp/gost.key.pem -out /tmp/signature2.bin "${MESSAGE}"

openssl dgst -engine gost -md_gost12_256 -verify /out/gost.pub.pem -signature /tmp/signature1.bin "${MESSAGE}" > /out/verify1.txt
openssl dgst -engine gost -md_gost12_256 -verify /out/gost.pub.pem -signature /tmp/signature2.bin "${MESSAGE}" > /out/verify2.txt

SIG1_SHA="$(sha256sum /tmp/signature1.bin | awk '{print $1}')"
SIG2_SHA="$(sha256sum /tmp/signature2.bin | awk '{print $1}')"
MSG_SHA="$(sha256sum "${MESSAGE}" | awk '{print $1}')"

cp /tmp/signature1.bin /out/signature1.bin
cp /tmp/signature2.bin /out/signature2.bin

DETERMINISTIC_BOOL=false
DETERMINISTIC_LABEL="no"
if [ "${SIG1_SHA}" = "${SIG2_SHA}" ]; then
  DETERMINISTIC_BOOL=true
  DETERMINISTIC_LABEL="yes"
fi
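# Note: GOST R 34.10-2012 signing, like ECDSA, draws a fresh random nonce per signature,
# so the two signatures above are expected to differ; a "yes" here would suggest a
# deterministic-nonce build and deserves a closer look.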

cat > /out/summary.txt <<SUMMARY
OpenSSL GOST validation (Linux engine)
Image: ${VALIDATION_IMAGE:-unknown}
Digest algorithm: md_gost12_256
Message SHA256: ${MSG_SHA}
Digest: ${DIGEST}
Signature1 SHA256: ${SIG1_SHA}
Signature2 SHA256: ${SIG2_SHA}
Signatures deterministic: ${DETERMINISTIC_LABEL}
SUMMARY

cat > /out/summary.json <<SUMMARYJSON
{
  "image": "${VALIDATION_IMAGE:-unknown}",
  "digest_algorithm": "md_gost12_256",
  "message_sha256": "${MSG_SHA}",
  "digest": "${DIGEST}",
  "signature1_sha256": "${SIG1_SHA}",
  "signature2_sha256": "${SIG2_SHA}",
  "signatures_deterministic": ${DETERMINISTIC_BOOL}
}
SUMMARYJSON

CONTAINER_SCRIPT

docker run --rm \
  -e VALIDATION_IMAGE="${IMAGE}" \
  -v "${MOUNT_PATH}:/out" \
  "${IMAGE}" /bin/sh "/out/$(basename "${CONTAINER_SCRIPT_PATH}")"

rm -f "${CONTAINER_SCRIPT_PATH}"

echo "[gost-validate] Artifacts written to ${LOG_ROOT}"
echo "[gost-validate] Summary:"
cat "${LOG_ROOT}/summary.txt"
Binary file not shown.
467
scripts/feeds/run_icscisa_kisa_refresh.py
Normal file
@@ -0,0 +1,467 @@
#!/usr/bin/env python3
"""
ICS/KISA feed refresh runner.

Runs the SOP v0.2 workflow to emit NDJSON advisories, delta, fetch log, and hash
manifest under out/feeds/icscisa-kisa/<YYYYMMDD>/.

Defaults to live fetch with an offline-safe fallback to baked-in samples. You can
force live or offline mode via env vars or CLI flags.
"""

from __future__ import annotations

import argparse
import datetime as dt
import hashlib
import json
import os
import re
import sys
from html import unescape
from pathlib import Path
from typing import Dict, Iterable, List, Tuple
from urllib.error import URLError, HTTPError
from urllib.parse import urlparse, urlunparse
from urllib.request import Request, urlopen
from xml.etree import ElementTree


DEFAULT_OUTPUT_ROOT = Path("out/feeds/icscisa-kisa")
DEFAULT_ICSCISA_URL = "https://www.cisa.gov/news-events/ics-advisories/icsa.xml"
DEFAULT_KISA_URL = "https://knvd.krcert.or.kr/rss/securityInfo.do"
DEFAULT_GATEWAY_HOST = "concelier-webservice"
DEFAULT_GATEWAY_SCHEME = "http"
USER_AGENT = "StellaOpsFeedRefresh/1.0 (+https://stella-ops.org)"


def utcnow() -> dt.datetime:
    return dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc)


def iso(ts: dt.datetime) -> str:
    return ts.strftime("%Y-%m-%dT%H:%M:%SZ")


def sha256_bytes(data: bytes) -> str:
    return hashlib.sha256(data).hexdigest()


def strip_html(value: str) -> str:
    return re.sub(r"<[^>]+>", "", value or "").strip()


def safe_request(url: str) -> bytes:
    req = Request(url, headers={"User-Agent": USER_AGENT})
    with urlopen(req, timeout=30) as resp:
        return resp.read()


def parse_rss_items(xml_bytes: bytes) -> Iterable[Dict[str, str]]:
    root = ElementTree.fromstring(xml_bytes)
    for item in root.findall(".//item"):
        title = (item.findtext("title") or "").strip()
        link = (item.findtext("link") or "").strip()
        description = strip_html(unescape(item.findtext("description") or ""))
        pub_date = (item.findtext("pubDate") or "").strip()
        yield {
            "title": title,
            "link": link,
            "description": description,
            "pub_date": pub_date,
        }


def normalize_icscisa_record(item: Dict[str, str], fetched_at: str, run_id: str) -> Dict[str, object]:
    advisory_id = item["title"].split(":")[0].strip() or "icsa-unknown"
    summary = item["description"] or item["title"]
    raw_payload = f"{item['title']}\n{item['link']}\n{item['description']}"
    record = {
        "advisory_id": advisory_id,
        "source": "icscisa",
        "source_url": item["link"] or DEFAULT_ICSCISA_URL,
        "title": item["title"] or advisory_id,
        "summary": summary,
        "published": iso(parse_pubdate(item["pub_date"])),
        "updated": iso(parse_pubdate(item["pub_date"])),
        "severity": "unknown",
        "cvss": None,
        "cwe": [],
        "affected_products": [],
        "references": [url for url in (item["link"],) if url],
        "signature": {"status": "missing", "reason": "unsigned_source"},
        "fetched_at": fetched_at,
        "run_id": run_id,
        "payload_sha256": sha256_bytes(raw_payload.encode("utf-8")),
    }
    return record


def normalize_kisa_record(item: Dict[str, str], fetched_at: str, run_id: str) -> Dict[str, object]:
    advisory_id = extract_kisa_id(item)
    raw_payload = f"{item['title']}\n{item['link']}\n{item['description']}"
    record = {
        "advisory_id": advisory_id,
        "source": "kisa",
        "source_url": item["link"] or DEFAULT_KISA_URL,
        "title": item["title"] or advisory_id,
        "summary": item["description"] or item["title"],
        "published": iso(parse_pubdate(item["pub_date"])),
        "updated": iso(parse_pubdate(item["pub_date"])),
        "severity": "unknown",
        "cvss": None,
        "cwe": [],
        "affected_products": [],
        "references": [url for url in (item["link"], DEFAULT_KISA_URL) if url],
        "signature": {"status": "missing", "reason": "unsigned_source"},
        "fetched_at": fetched_at,
        "run_id": run_id,
        "payload_sha256": sha256_bytes(raw_payload.encode("utf-8")),
    }
    return record


def extract_kisa_id(item: Dict[str, str]) -> str:
    link = item["link"]
    match = re.search(r"IDX=([0-9]+)", link)
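    # e.g. "https://knvd.krcert.or.kr/detailDos.do?IDX=5859" -> "KISA-5859" (see sample data below)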
    if match:
        return f"KISA-{match.group(1)}"
    return (item["title"].split()[0] if item["title"] else "KISA-unknown").strip()


def parse_pubdate(value: str) -> dt.datetime:
    if not value:
        return utcnow()
    try:
        # RFC1123-ish
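        # e.g. "Mon, 03 Nov 2025 22:53:00 GMT"; non-matching values fall through to ISO-8601 below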
        return dt.datetime.strptime(value, "%a, %d %b %Y %H:%M:%S %Z").replace(tzinfo=dt.timezone.utc)
    except ValueError:
        try:
            return dt.datetime.fromisoformat(value.replace("Z", "+00:00"))
        except ValueError:
            return utcnow()


def sample_records() -> List[Dict[str, object]]:
    now_iso = iso(utcnow())
    return [
        {
            "advisory_id": "ICSA-25-123-01",
            "source": "icscisa",
            "source_url": "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01",
            "title": "Example ICS Advisory",
            "summary": "Example Corp ControlSuite RCE via exposed management service.",
            "published": "2025-10-13T12:00:00Z",
            "updated": "2025-11-30T00:00:00Z",
            "severity": "High",
            "cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", "score": 9.8},
            "cwe": ["CWE-269"],
            "affected_products": [{"vendor": "Example Corp", "product": "ControlSuite", "versions": ["4.2.0", "4.2.1"]}],
            "references": [
                "https://example.com/security/icsa-25-123-01.pdf",
                "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01",
            ],
            "signature": {"status": "missing", "reason": "unsigned_source"},
            "fetched_at": now_iso,
            "run_id": "",
            "payload_sha256": sha256_bytes(b"ICSA-25-123-01 Example ControlSuite advisory payload"),
        },
        {
            "advisory_id": "ICSMA-25-045-01",
            "source": "icscisa",
            "source_url": "https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01",
            "title": "Example Medical Advisory",
            "summary": "HealthTech infusion pump vulnerabilities including two CVEs.",
            "published": "2025-10-14T09:30:00Z",
            "updated": "2025-12-01T00:00:00Z",
            "severity": "Medium",
            "cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L", "score": 6.3},
            "cwe": ["CWE-319"],
            "affected_products": [{"vendor": "HealthTech", "product": "InfusionManager", "versions": ["2.1.0", "2.1.1"]}],
            "references": [
                "https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01",
                "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2025-11111",
            ],
            "signature": {"status": "missing", "reason": "unsigned_source"},
            "fetched_at": now_iso,
            "run_id": "",
            "payload_sha256": sha256_bytes(b"ICSMA-25-045-01 Example medical advisory payload"),
        },
        {
            "advisory_id": "KISA-2025-5859",
            "source": "kisa",
            "source_url": "https://knvd.krcert.or.kr/detailDos.do?IDX=5859",
            "title": "KISA sample advisory 5859",
            "summary": "Remote code execution in ControlBoard service (offline HTML snapshot).",
            "published": "2025-11-03T22:53:00Z",
            "updated": "2025-12-02T00:00:00Z",
            "severity": "High",
            "cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", "score": 9.8},
            "cwe": ["CWE-787"],
            "affected_products": [{"vendor": "ACME", "product": "ControlBoard", "versions": ["1.0.1.0084", "2.0.1.0034"]}],
            "references": [
                "https://knvd.krcert.or.kr/rss/securityInfo.do",
                "https://knvd.krcert.or.kr/detailDos.do?IDX=5859",
            ],
            "signature": {"status": "missing", "reason": "unsigned_source"},
            "fetched_at": now_iso,
            "run_id": "",
            "payload_sha256": sha256_bytes(b"KISA advisory IDX 5859 cached HTML payload"),
        },
        {
            "advisory_id": "KISA-2025-5860",
            "source": "kisa",
            "source_url": "https://knvd.krcert.or.kr/detailDos.do?IDX=5860",
            "title": "KISA sample advisory 5860",
            "summary": "Authentication bypass via default credentials in NetGateway appliance.",
            "published": "2025-11-03T22:53:00Z",
            "updated": "2025-12-02T00:00:00Z",
            "severity": "Medium",
            "cvss": {"version": "3.1", "vector": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:L/I:L/A:L", "score": 7.3},
            "cwe": ["CWE-798"],
            "affected_products": [{"vendor": "NetGateway", "product": "Edge", "versions": ["3.4.2", "3.4.3"]}],
            "references": [
                "https://knvd.krcert.or.kr/rss/securityInfo.do",
                "https://knvd.krcert.or.kr/detailDos.do?IDX=5860",
            ],
            "signature": {"status": "missing", "reason": "unsigned_source"},
            "fetched_at": now_iso,
            "run_id": "",
            "payload_sha256": sha256_bytes(b"KISA advisory IDX 5860 cached HTML payload"),
        },
    ]


def build_records(
    run_id: str,
    fetched_at: str,
    live_fetch: bool,
    offline_only: bool,
    icscisa_url: str,
    kisa_url: str,
) -> Tuple[List[Dict[str, object]], Dict[str, str]]:
    samples = sample_records()
    sample_icscisa = [r for r in samples if r["source"] == "icscisa"]
    sample_kisa = [r for r in samples if r["source"] == "kisa"]
    status = {"icscisa": "offline", "kisa": "offline"}
    records: List[Dict[str, object]] = []

    if live_fetch and not offline_only:
        try:
            icscisa_items = list(parse_rss_items(safe_request(icscisa_url)))
            for item in icscisa_items:
                records.append(normalize_icscisa_record(item, fetched_at, run_id))
            status["icscisa"] = f"live:{len(icscisa_items)}"
        except (URLError, HTTPError, ElementTree.ParseError, TimeoutError) as exc:
            print(f"[warn] ICS CISA fetch failed ({exc}); falling back to samples.", file=sys.stderr)

        try:
            kisa_items = list(parse_rss_items(safe_request(kisa_url)))
            for item in kisa_items:
                records.append(normalize_kisa_record(item, fetched_at, run_id))
            status["kisa"] = f"live:{len(kisa_items)}"
        except (URLError, HTTPError, ElementTree.ParseError, TimeoutError) as exc:
            print(f"[warn] KISA fetch failed ({exc}); falling back to samples.", file=sys.stderr)

    if not records or not status["icscisa"].startswith("live"):
        records.extend(apply_run_metadata(sample_icscisa, run_id, fetched_at))
        status["icscisa"] = status.get("icscisa") or "offline"

    if not any(r["source"] == "kisa" for r in records):
        records.extend(apply_run_metadata(sample_kisa, run_id, fetched_at))
        status["kisa"] = status.get("kisa") or "offline"

    return records, status


def apply_run_metadata(records: Iterable[Dict[str, object]], run_id: str, fetched_at: str) -> List[Dict[str, object]]:
    updated = []
    for record in records:
        copy = dict(record)
        copy["run_id"] = run_id
        copy["fetched_at"] = fetched_at
        copy["payload_sha256"] = record.get("payload_sha256") or sha256_bytes(json.dumps(record, sort_keys=True).encode("utf-8"))
        updated.append(copy)
    return updated


def find_previous_snapshot(base_dir: Path, current_run_date: str) -> Path | None:
    if not base_dir.exists():
        return None
    candidates = sorted(p for p in base_dir.iterdir() if p.is_dir() and p.name != current_run_date)
    if not candidates:
        return None
    return candidates[-1] / "advisories.ndjson"


def load_previous_hash(path: Path | None) -> str | None:
    if path and path.exists():
        return sha256_bytes(path.read_bytes())
    return None


def compute_delta(new_records: List[Dict[str, object]], previous_path: Path | None) -> Dict[str, object]:
    prev_records = {}
    if previous_path and previous_path.exists():
        with previous_path.open("r", encoding="utf-8") as handle:
            for line in handle:
                if line.strip():
                    rec = json.loads(line)
                    prev_records[rec["advisory_id"]] = rec

    new_by_id = {r["advisory_id"]: r for r in new_records}
    added = [rid for rid in new_by_id if rid not in prev_records]
    updated = [
        rid
        for rid, rec in new_by_id.items()
        if rid in prev_records and rec.get("payload_sha256") != prev_records[rid].get("payload_sha256")
    ]
    removed = [rid for rid in prev_records if rid not in new_by_id]

    return {
        "added": {"icscisa": [rid for rid in added if new_by_id[rid]["source"] == "icscisa"],
                  "kisa": [rid for rid in added if new_by_id[rid]["source"] == "kisa"]},
        "updated": {"icscisa": [rid for rid in updated if new_by_id[rid]["source"] == "icscisa"],
                    "kisa": [rid for rid in updated if new_by_id[rid]["source"] == "kisa"]},
        "removed": {"icscisa": [rid for rid in removed if prev_records[rid]["source"] == "icscisa"],
                    "kisa": [rid for rid in removed if prev_records[rid]["source"] == "kisa"]},
        "totals": {
            "icscisa": {
                "added": len([rid for rid in added if new_by_id[rid]["source"] == "icscisa"]),
                "updated": len([rid for rid in updated if new_by_id[rid]["source"] == "icscisa"]),
                "removed": len([rid for rid in removed if prev_records[rid]["source"] == "icscisa"]),
                "remaining": len([rid for rid, rec in new_by_id.items() if rec["source"] == "icscisa"]),
            },
            "kisa": {
                "added": len([rid for rid in added if new_by_id[rid]["source"] == "kisa"]),
                "updated": len([rid for rid in updated if new_by_id[rid]["source"] == "kisa"]),
                "removed": len([rid for rid in removed if prev_records[rid]["source"] == "kisa"]),
                "remaining": len([rid for rid, rec in new_by_id.items() if rec["source"] == "kisa"]),
            },
            "overall": len(new_records),
        },
    }


def write_ndjson(records: List[Dict[str, object]], path: Path) -> None:
    path.write_text("\n".join(json.dumps(r, sort_keys=True, separators=(",", ":")) for r in records) + "\n", encoding="utf-8")


def write_fetch_log(
    path: Path,
    run_id: str,
    start: str,
    end: str,
    status: Dict[str, str],
    gateway_host: str,
    gateway_scheme: str,
    icscisa_url: str,
    kisa_url: str,
    live_fetch: bool,
    offline_only: bool,
) -> None:
    lines = [
        f"run_id={run_id} start={start} end={end}",
        f"sources=icscisa,kisa cadence=weekly backlog_window=60d live_fetch={str(live_fetch).lower()} offline_only={str(offline_only).lower()}",
        f"gateway={gateway_scheme}://{gateway_host}",
        f"icscisa_url={icscisa_url} status={status.get('icscisa','offline')} retries=0",
        f"kisa_url={kisa_url} status={status.get('kisa','offline')} retries=0",
        "outputs=advisories.ndjson,delta.json,hashes.sha256",
    ]
    path.write_text("\n".join(lines) + "\n", encoding="utf-8")


def write_hashes(dir_path: Path) -> None:
    entries = []
    for name in ["advisories.ndjson", "delta.json", "fetch.log"]:
        file_path = dir_path / name
        entries.append(f"{sha256_bytes(file_path.read_bytes())} {name}")
    (dir_path / "hashes.sha256").write_text("\n".join(entries) + "\n", encoding="utf-8")


def main() -> None:
    parser = argparse.ArgumentParser(description="Run ICS/KISA feed refresh SOP v0.2")
    parser.add_argument("--out-dir", default=str(DEFAULT_OUTPUT_ROOT), help="Base output directory (default: out/feeds/icscisa-kisa)")
    parser.add_argument("--run-date", default=None, help="Override run date (YYYYMMDD)")
    parser.add_argument("--run-id", default=None, help="Override run id")
    parser.add_argument("--live", action="store_true", default=False, help="Force live fetch (default: enabled via env LIVE_FETCH=true)")
    parser.add_argument("--offline", action="store_true", default=False, help="Force offline samples only")
    args = parser.parse_args()

    now = utcnow()
    run_date = args.run_date or now.strftime("%Y%m%d")
    run_id = args.run_id or f"icscisa-kisa-{now.strftime('%Y%m%dT%H%M%SZ')}"
    fetched_at = iso(now)
    start = fetched_at

    live_fetch = args.live or os.getenv("LIVE_FETCH", "true").lower() == "true"
    offline_only = args.offline or os.getenv("OFFLINE_SNAPSHOT", "false").lower() == "true"

    output_root = Path(args.out_dir)
    output_dir = output_root / run_date
    output_dir.mkdir(parents=True, exist_ok=True)

    previous_path = find_previous_snapshot(output_root, run_date)

    gateway_host = os.getenv("FEED_GATEWAY_HOST", DEFAULT_GATEWAY_HOST)
    gateway_scheme = os.getenv("FEED_GATEWAY_SCHEME", DEFAULT_GATEWAY_SCHEME)

    def resolve_feed(url_env: str, default_url: str) -> str:
        if url_env:
            return url_env
        parsed = urlparse(default_url)
        # Replace host/scheme to allow on-prem DNS (docker network) defaults.
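        # e.g. https://www.cisa.gov/news-events/ics-advisories/icsa.xml
        #   -> http://concelier-webservice/news-events/ics-advisories/icsa.xml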
        rewritten = parsed._replace(netloc=gateway_host, scheme=gateway_scheme)
        return urlunparse(rewritten)

    resolved_icscisa_url = resolve_feed(os.getenv("ICSCISA_FEED_URL"), DEFAULT_ICSCISA_URL)
    resolved_kisa_url = resolve_feed(os.getenv("KISA_FEED_URL"), DEFAULT_KISA_URL)

    records, status = build_records(
        run_id=run_id,
        fetched_at=fetched_at,
        live_fetch=live_fetch,
        offline_only=offline_only,
        icscisa_url=resolved_icscisa_url,
        kisa_url=resolved_kisa_url,
    )

    write_ndjson(records, output_dir / "advisories.ndjson")

    delta = compute_delta(records, previous_path)
    delta_payload = {
        "run_id": run_id,
        "generated_at": iso(utcnow()),
        **delta,
        "previous_snapshot_sha256": load_previous_hash(previous_path),
    }
    (output_dir / "delta.json").write_text(json.dumps(delta_payload, separators=(",", ":")) + "\n", encoding="utf-8")

    end = iso(utcnow())
    write_fetch_log(
        output_dir / "fetch.log",
        run_id,
        start,
        end,
        status,
        gateway_host=gateway_host,
        gateway_scheme=gateway_scheme,
        icscisa_url=resolved_icscisa_url,
        kisa_url=resolved_kisa_url,
        live_fetch=live_fetch and not offline_only,
        offline_only=offline_only,
    )
    write_hashes(output_dir)

    print(f"[ok] wrote {len(records)} advisories to {output_dir}")
    print(f"  run_id={run_id} live_fetch={live_fetch and not offline_only} offline_only={offline_only}")
    print(f"  gateway={gateway_scheme}://{gateway_host}")
    print(f"  icscisa_url={resolved_icscisa_url}")
    print(f"  kisa_url={resolved_kisa_url}")
    print(f"  status={status}")
    if previous_path:
        print(f"  previous_snapshot={previous_path}")


if __name__ == "__main__":
    main()
@@ -7,6 +7,8 @@
 - `verify_oci_layout.py`: validates OCI layout/index/manifest and blob digests when `OCI=1` is used.
 - `mirror-create.sh`: convenience wrapper to build + verify thin bundles (optional SIGN_KEY, time anchor, OCI flag).
 - `mirror-verify.sh`: wrapper around `verify_thin_bundle.py` for quick hash/DSSE checks.
-- `schedule-export-center-run.sh`: schedules an Export Center run for mirror bundles via HTTP POST; set `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_TOKEN` (Bearer), optional `EXPORT_CENTER_PROJECT`; logs to `AUDIT_LOG_PATH` (default `logs/export-center-schedule.log`).
+- `schedule-export-center-run.sh`: schedules an Export Center run for mirror bundles via HTTP POST; set `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_TOKEN` (Bearer), optional `EXPORT_CENTER_PROJECT`; logs to `AUDIT_LOG_PATH` (default `logs/export-center-schedule.log`). Set `EXPORT_CENTER_ARTIFACTS_JSON` to inject bundle metadata into the request payload.
+- `export-center-wire.sh`: builds `export-center-handoff.json` from `out/mirror/thin/milestone.json`, emits recommended Export Center targets, and (when `EXPORT_CENTER_AUTO_SCHEDULE=1`) calls `schedule-export-center-run.sh` to push the run. Outputs live under `out/mirror/thin/export-center/`.
+- CI: `.gitea/workflows/mirror-sign.yml` runs this script after signing; scheduling remains opt-in via secrets `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TOKEN`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_PROJECT`, `EXPORT_CENTER_AUTO_SCHEDULE`.
 
 Artifacts live under `out/mirror/thin/`.
122
scripts/mirror/export-center-wire.sh
Executable file
@@ -0,0 +1,122 @@
#!/usr/bin/env bash
set -euo pipefail

# Prepare Export Center handoff metadata for mirror thin bundles and optionally schedule a run.
# Usage (handoff only):
#   scripts/mirror/export-center-wire.sh
# Usage (handoff + schedule when secrets exist):
#   EXPORT_CENTER_BASE_URL=https://export.example.com \
#   EXPORT_CENTER_TOKEN=token123 \
#   EXPORT_CENTER_TENANT=tenant-a \
#   EXPORT_CENTER_AUTO_SCHEDULE=1 \
#   scripts/mirror/export-center-wire.sh
# Inputs:
# - MILESTONE_PATH: path to milestone.json (default: out/mirror/thin/milestone.json)
# - EXPORT_CENTER_OUT_DIR: output directory for handoff files (default: out/mirror/thin/export-center)
# - EXPORT_CENTER_PROFILE_ID: profile identifier for the Export Center run (default: mirror:thin)
# - EXPORT_CENTER_TARGETS_JSON: override targets array sent to Export Center (JSON array string)
# - EXPORT_CENTER_FORMATS_JSON: override formats array (JSON array string; default: ["tar.gz","json","dsse"])
# - EXPORT_CENTER_AUTO_SCHEDULE: when "1", schedule a run using schedule-export-center-run.sh
# - EXPORT_CENTER_BASE_URL / EXPORT_CENTER_TENANT / EXPORT_CENTER_PROJECT / EXPORT_CENTER_TOKEN: forwarded to scheduler
# - EXPORT_CENTER_AUDIT_LOG: optional override for scheduler audit log path
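# Handoff shape (sketch of what the embedded Python below writes):
#   {"profileId": "mirror:thin", "generatedAt": "<UTC ISO>", "sourceMilestone": "/abs/path/milestone.json",
#    "artifacts": [{"name": "bundle", "path": "...", "sha256": "..."}, ...]}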

MILESTONE_PATH="${MILESTONE_PATH:-out/mirror/thin/milestone.json}"
OUT_DIR="${EXPORT_CENTER_OUT_DIR:-out/mirror/thin/export-center}"
PROFILE_ID="${EXPORT_CENTER_PROFILE_ID:-mirror:thin}"
FORMATS_JSON="${EXPORT_CENTER_FORMATS_JSON:-[\"tar.gz\",\"json\",\"dsse\"]}"
AUTO_SCHEDULE="${EXPORT_CENTER_AUTO_SCHEDULE:-0}"

HANDOFF_PATH="${OUT_DIR}/export-center-handoff.json"
TARGETS_PATH="${OUT_DIR}/export-center-targets.json"
RESPONSE_PATH="${OUT_DIR}/schedule-response.json"

export HANDOFF_PATH TARGETS_PATH RESPONSE_PATH PROFILE_ID MILESTONE_PATH

mkdir -p "${OUT_DIR}"

PROFILE_ID="${PROFILE_ID}" MILESTONE_PATH="${MILESTONE_PATH}" HANDOFF_PATH="${HANDOFF_PATH}" TARGETS_PATH="${TARGETS_PATH}" python3 - <<'PY'
import datetime
import json
import os
import sys
from typing import Dict, Any

milestone_path = os.environ["MILESTONE_PATH"]
handoff_path = os.environ["HANDOFF_PATH"]
targets_path = os.environ["TARGETS_PATH"]
profile = os.environ.get("PROFILE_ID", "mirror:thin")

try:
    with open(milestone_path, encoding="utf-8") as f:
        milestone = json.load(f)
except FileNotFoundError:
    print(f"milestone file not found: {milestone_path}", file=sys.stderr)
    sys.exit(1)

artifacts = []

def add_artifact(name: str, entry: Dict[str, Any] | None) -> None:
    if not isinstance(entry, dict):
        return
    path = entry.get("path")
    sha = entry.get("sha256")
    if path and sha:
        artifacts.append({"name": name, "path": path, "sha256": sha})

add_artifact("manifest", milestone.get("manifest"))
add_artifact("manifest_dsse", milestone.get("dsse"))
add_artifact("bundle", milestone.get("tarball"))
add_artifact("bundle_meta", milestone.get("bundle"))
add_artifact("bundle_meta_dsse", milestone.get("bundle_dsse"))
add_artifact("time_anchor", milestone.get("time_anchor"))

for name, entry in sorted((milestone.get("policies") or {}).items()):
    add_artifact(f"policy_{name}", entry)

handoff = {
    "profileId": profile,
    "generatedAt": datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z"),
    "sourceMilestone": os.path.abspath(milestone_path),
    "artifacts": artifacts,
}

with open(handoff_path, "w", encoding="utf-8") as f:
    json.dump(handoff, f, indent=2)

with open(targets_path, "w", encoding="utf-8") as f:
    json.dump([a["name"] for a in artifacts], f)
PY

ARTIFACTS_JSON=$(python3 - <<'PY'
import json
import os
with open(os.environ["HANDOFF_PATH"], encoding="utf-8") as f:
    data = json.load(f)
print(json.dumps(data.get("artifacts", [])))
PY
)
ARTIFACTS_JSON="${ARTIFACTS_JSON//$'\n'/}"

TARGETS_JSON_DEFAULT=$(tr -d '\r\n' < "${TARGETS_PATH}")
TARGETS_JSON="${EXPORT_CENTER_TARGETS_JSON:-$TARGETS_JSON_DEFAULT}"

echo "[info] Export Center handoff written to ${HANDOFF_PATH}"
echo "[info] Recommended targets: ${TARGETS_JSON}"

schedule_note="AUTO_SCHEDULE=0"
if [[ "${AUTO_SCHEDULE}" == "1" ]]; then
  schedule_note="missing EXPORT_CENTER_BASE_URL"
  if [[ -n "${EXPORT_CENTER_BASE_URL:-}" ]]; then
    export EXPORT_CENTER_ARTIFACTS_JSON="${ARTIFACTS_JSON}"
    schedule_note="scheduled"
    bash src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh "${PROFILE_ID}" "${TARGETS_JSON}" "${FORMATS_JSON}" | tee "${RESPONSE_PATH}"
  fi
fi

if [[ ! -f "${RESPONSE_PATH}" ]]; then
  cat > "${RESPONSE_PATH}" <<JSON
{"scheduled": false, "reason": "${schedule_note}"}
JSON
fi

echo "[info] Scheduler response captured at ${RESPONSE_PATH}"
13
scripts/signals/reachability-smoke.sh
Executable file
@@ -0,0 +1,13 @@
#!/usr/bin/env bash
set -euo pipefail

# Lightweight smoke for SIGNALS-24-004/005: run reachability scoring + cache/event tests.
# Uses existing unit tests as fixtures; intended for CI and local preflight.
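# Usage sketch: the first positional argument overrides the test project; FILTER narrows --filter.
#   scripts/signals/reachability-smoke.sh src/Signals/__Tests/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj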

ROOT="${1:-src/Signals/__Tests/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj}"
FILTER="${FILTER:-ReachabilityScoringServiceTests|RuntimeFactsIngestionServiceTests.IngestAsync_AggregatesHits_AndRecomputesReachability|InMemoryEventsPublisherTests}"

echo "[info] Running reachability smoke against ${ROOT}"
dotnet test "${ROOT}" -c Release --no-build --filter "${FILTER}" --logger "console;verbosity=normal"

echo "[info] Reachability smoke succeeded."