CD/CD consolidation
This commit is contained in:
33
devops/telemetry/validation/README.md
Normal file
33
devops/telemetry/validation/README.md
Normal file
@@ -0,0 +1,33 @@
|
||||
# Telemetry bundle verifier
|
||||
|
||||
Files:
|
||||
- `verify-telemetry-bundle.sh`: offline verifier (checksums + optional JSON schema)
|
||||
- `tests/sample-bundle/telemetry-bundle.json`: sample manifest
|
||||
- `tests/sample-bundle/telemetry-bundle.sha256`: checksum list for sample bundle
|
||||
- `tests/telemetry-bundle.tar`: deterministic sample bundle (ustar, mtime=0, owner/group 0)
|
||||
- `tests/run-schema-tests.sh`: validates sample config against config schema
|
||||
- `tests/ci-run.sh`: runs schema test + bundle verifier (use in CI)
|
||||
|
||||
Dependencies for full validation:
|
||||
- `python` with `jsonschema` installed (`pip install jsonschema`)
|
||||
- `tar`, `sha256sum`
|
||||
|
||||
Deterministic TAR flags used for sample bundle:
|
||||
`tar --mtime=@0 --owner=0 --group=0 --numeric-owner --format=ustar`
|
||||
|
||||
Exit codes:
|
||||
- 0 success
|
||||
- 21 missing manifest/checksums
|
||||
- 22 checksum mismatch
|
||||
- 23 schema validation failed
|
||||
- 64 usage error
|
||||
|
||||
Quick check:
|
||||
```bash
|
||||
./verify-telemetry-bundle.sh tests/telemetry-bundle.tar
|
||||
```
|
||||
|
||||
CI suggestion:
|
||||
```bash
|
||||
./tests/ci-run.sh
|
||||
```
|
||||
77
devops/telemetry/validation/generate_dev_tls.sh
Normal file
77
devops/telemetry/validation/generate_dev_tls.sh
Normal file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env bash
# Generate a throwaway dev CA plus collector/client certificates for local
# OpenTelemetry testing. The CA key is reused across runs so previously issued
# certificates remain verifiable; leaf certs are re-issued every run.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# deploy/ sits at the repository root, three levels above
# devops/telemetry/validation/ (the sibling Python scripts use
# Path(__file__).parents[3] / "deploy/telemetry/certs" for the same location).
# The previous "../../" resolved inside devops/ and wrote certs to the wrong place.
CERT_DIR="${SCRIPT_DIR}/../../../deploy/telemetry/certs"

mkdir -p "${CERT_DIR}"

CA_KEY="${CERT_DIR}/ca.key"
CA_CRT="${CERT_DIR}/ca.crt"
COL_KEY="${CERT_DIR}/collector.key"
COL_CSR="${CERT_DIR}/collector.csr"
COL_CRT="${CERT_DIR}/collector.crt"
CLIENT_KEY="${CERT_DIR}/client.key"
CLIENT_CSR="${CERT_DIR}/client.csr"
CLIENT_CRT="${CERT_DIR}/client.crt"

echo "[*] Generating OpenTelemetry dev CA and certificates in ${CERT_DIR}"

# Root CA: generate the key only once, re-issue the self-signed cert each run.
if [[ ! -f "${CA_KEY}" ]]; then
  openssl genrsa -out "${CA_KEY}" 4096 >/dev/null 2>&1
fi
openssl req -x509 -new -key "${CA_KEY}" -days 365 -sha256 \
  -out "${CA_CRT}" -subj "/CN=StellaOps Dev Telemetry CA" \
  -config <(cat <<'EOF'
[req]
distinguished_name = req_distinguished_name
prompt = no
[req_distinguished_name]
EOF
) >/dev/null 2>&1

# Collector certificate (server + client auth).
openssl req -new -nodes -newkey rsa:4096 \
  -keyout "${COL_KEY}" \
  -out "${COL_CSR}" \
  -subj "/CN=stellaops-otel-collector" >/dev/null 2>&1

openssl x509 -req -in "${COL_CSR}" -CA "${CA_CRT}" -CAkey "${CA_KEY}" \
  -CAcreateserial -out "${COL_CRT}" -days 365 -sha256 \
  -extensions v3_req -extfile <(cat <<'EOF'
[v3_req]
subjectAltName = @alt_names
extendedKeyUsage = serverAuth, clientAuth
[alt_names]
DNS.1 = stellaops-otel-collector
DNS.2 = localhost
IP.1 = 127.0.0.1
EOF
) >/dev/null 2>&1

# Client certificate (client auth only).
openssl req -new -nodes -newkey rsa:4096 \
  -keyout "${CLIENT_KEY}" \
  -out "${CLIENT_CSR}" \
  -subj "/CN=stellaops-otel-client" >/dev/null 2>&1

openssl x509 -req -in "${CLIENT_CSR}" -CA "${CA_CRT}" -CAkey "${CA_KEY}" \
  -CAcreateserial -out "${CLIENT_CRT}" -days 365 -sha256 \
  -extensions v3_req -extfile <(cat <<'EOF'
[v3_req]
extendedKeyUsage = clientAuth
subjectAltName = @alt_names
[alt_names]
DNS.1 = stellaops-otel-client
DNS.2 = localhost
IP.1 = 127.0.0.1
EOF
) >/dev/null 2>&1

# Remove intermediate artefacts (CSRs and OpenSSL's serial tracking file).
rm -f "${COL_CSR}" "${CLIENT_CSR}"
rm -f "${CERT_DIR}/ca.srl"

echo "[✓] Certificates ready:"
ls -1 "${CERT_DIR}"
|
||||
136
devops/telemetry/validation/package_offline_bundle.py
Normal file
136
devops/telemetry/validation/package_offline_bundle.py
Normal file
@@ -0,0 +1,136 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Package telemetry collector assets for offline/air-gapped installs.
|
||||
|
||||
Outputs a tarball containing the collector configuration, Compose overlay,
|
||||
Helm defaults, and operator README. A SHA-256 checksum sidecar is emitted, and
|
||||
optional Cosign signing can be enabled with --sign.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
from pathlib import Path
|
||||
from typing import Iterable
|
||||
|
||||
# Repository root, assuming this file lives at devops/telemetry/validation/.
REPO_ROOT = Path(__file__).resolve().parents[3]
# Default bundle destination under out/ at the repo root.
DEFAULT_OUTPUT = REPO_ROOT / "out" / "telemetry" / "telemetry-offline-bundle.tar.gz"
# Repo-relative artefacts packed into the offline bundle, in archive order.
# validate_files() checks each exists before packaging.
BUNDLE_CONTENTS: tuple[Path, ...] = (
    Path("deploy/telemetry/README.md"),
    Path("deploy/telemetry/otel-collector-config.yaml"),
    Path("deploy/telemetry/storage/README.md"),
    Path("deploy/telemetry/storage/prometheus.yaml"),
    Path("deploy/telemetry/storage/tempo.yaml"),
    Path("deploy/telemetry/storage/loki.yaml"),
    Path("deploy/telemetry/storage/tenants/tempo-overrides.yaml"),
    Path("deploy/telemetry/storage/tenants/loki-overrides.yaml"),
    Path("deploy/helm/stellaops/files/otel-collector-config.yaml"),
    Path("deploy/helm/stellaops/values.yaml"),
    Path("deploy/helm/stellaops/templates/otel-collector.yaml"),
    Path("deploy/compose/docker-compose.telemetry.yaml"),
    Path("deploy/compose/docker-compose.telemetry-storage.yaml"),
    Path("docs/modules/telemetry/operations/collector.md"),
    Path("docs/modules/telemetry/operations/storage.md"),
)
|
||||
|
||||
|
||||
def compute_sha256(path: Path) -> str:
    """Return the hex-encoded SHA-256 digest of *path*, read in 1 MiB chunks."""
    digest = hashlib.sha256()
    with path.open("rb") as stream:
        while chunk := stream.read(1024 * 1024):
            digest.update(chunk)
    return digest.hexdigest()
|
||||
|
||||
|
||||
def validate_files(paths: Iterable[Path]) -> None:
    """Raise FileNotFoundError listing every repo-relative artefact that is absent."""
    absent = []
    for candidate in paths:
        if not (REPO_ROOT / candidate).exists():
            absent.append(str(candidate))
    if absent:
        raise FileNotFoundError(f"Missing bundle artefacts: {', '.join(absent)}")
|
||||
|
||||
|
||||
def create_bundle(output_path: Path) -> Path:
    """Write a gzipped tar of BUNDLE_CONTENTS to *output_path* and return it.

    Archive member names are the repo-relative paths, so the bundle unpacks
    into the same layout on the target host.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)
    with tarfile.open(output_path, "w:gz") as archive:
        for relative in BUNDLE_CONTENTS:
            archive.add(REPO_ROOT / relative, arcname=str(relative))
    return output_path
|
||||
|
||||
|
||||
def write_checksum(bundle_path: Path) -> Path:
    """Write a ``<bundle>.sha256`` sidecar (``digest name`` format) and return its path."""
    sidecar = bundle_path.with_suffix(bundle_path.suffix + ".sha256")
    sidecar.write_text(
        f"{compute_sha256(bundle_path)} {bundle_path.name}\n", encoding="utf-8"
    )
    return sidecar
|
||||
|
||||
|
||||
def cosign_sign(bundle_path: Path, key_ref: str | None, identity_token: str | None) -> None:
    """Sign *bundle_path* via ``cosign sign-blob --yes``.

    A key reference switches to key-based signing; an identity token is passed
    through the environment for keyless flows. Raises RuntimeError when cosign
    is missing from PATH or exits non-zero.
    """
    command = ["cosign", "sign-blob", "--yes", str(bundle_path)]
    if key_ref:
        command += ["--key", key_ref]
    environment = dict(os.environ)
    if identity_token:
        environment["COSIGN_IDENTITY_TOKEN"] = identity_token
    try:
        subprocess.run(command, check=True, env=environment)
    except FileNotFoundError as exc:
        raise RuntimeError("cosign not found on PATH; install cosign or omit --sign") from exc
    except subprocess.CalledProcessError as exc:
        raise RuntimeError(f"cosign sign-blob failed: {exc}") from exc
|
||||
|
||||
|
||||
def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
    """Parse command-line options; *argv* of None falls back to sys.argv."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--output", type=Path, default=DEFAULT_OUTPUT,
                        help=f"Output bundle path (default: {DEFAULT_OUTPUT})")
    parser.add_argument("--sign", action="store_true",
                        help="Sign the bundle using cosign (requires cosign on PATH)")
    # Key/token defaults come from the environment so CI can configure signing
    # without extra flags.
    parser.add_argument("--cosign-key", type=str,
                        default=os.environ.get("COSIGN_KEY_REF"),
                        help="Cosign key reference (file:..., azurekms://..., etc.)")
    parser.add_argument("--identity-token", type=str,
                        default=os.environ.get("COSIGN_IDENTITY_TOKEN"),
                        help="OIDC identity token for keyless signing")
    return parser.parse_args(argv)
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
    """Create, checksum, and optionally cosign-sign the offline telemetry bundle.

    Returns 0 on success. Missing artefacts raise FileNotFoundError; signing
    problems raise RuntimeError (see cosign_sign).
    """
    args = parse_args(argv)
    # Fail before creating any output if an expected artefact is absent.
    validate_files(BUNDLE_CONTENTS)

    bundle_path = args.output.resolve()
    print(f"[*] Creating telemetry bundle at {bundle_path}")
    create_bundle(bundle_path)
    sha_path = write_checksum(bundle_path)
    print(f"[✓] SHA-256 written to {sha_path}")

    if args.sign:
        print("[*] Signing bundle with cosign")
        cosign_sign(bundle_path, args.cosign_key, args.identity_token)
        # cosign may emit <bundle>.sig next to the blob depending on version /
        # flags, hence the existence check rather than an assumption.
        sig_path = bundle_path.with_suffix(bundle_path.suffix + ".sig")
        if sig_path.exists():
            print(f"[✓] Cosign signature written to {sig_path}")
        else:
            print("[!] Cosign completed but signature file not found (ensure cosign version >= 2.2)")

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
197
devops/telemetry/validation/smoke_otel_collector.py
Normal file
197
devops/telemetry/validation/smoke_otel_collector.py
Normal file
@@ -0,0 +1,197 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Smoke test for the StellaOps OpenTelemetry Collector deployment.
|
||||
|
||||
The script sends sample traces, metrics, and logs over OTLP/HTTP with mutual TLS
|
||||
and asserts that the collector accepted the payloads by checking its Prometheus
|
||||
metrics endpoint.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import ssl
|
||||
import sys
|
||||
import time
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
|
||||
# Canned OTLP/HTTP JSON payloads. Fixed IDs and timestamps keep the smoke test
# deterministic; every resource carries service.name plus a tenant.id attribute.

# One span, trace/span IDs hex-encoded per OTLP JSON conventions.
TRACE_PAYLOAD = {
    "resourceSpans": [
        {
            "resource": {
                "attributes": [
                    {"key": "service.name", "value": {"stringValue": "smoke-client"}},
                    {"key": "tenant.id", "value": {"stringValue": "dev"}},
                ]
            },
            "scopeSpans": [
                {
                    "scope": {"name": "smoke-test"},
                    "spans": [
                        {
                            "traceId": "00000000000000000000000000000001",
                            "spanId": "0000000000000001",
                            "name": "smoke-span",
                            "kind": 1,
                            "startTimeUnixNano": "1730000000000000000",
                            "endTimeUnixNano": "1730000000500000000",
                            "status": {"code": 0},
                        }
                    ],
                }
            ],
        }
    ]
}

# One gauge data point; exercises the metrics pipeline.
METRIC_PAYLOAD = {
    "resourceMetrics": [
        {
            "resource": {
                "attributes": [
                    {"key": "service.name", "value": {"stringValue": "smoke-client"}},
                    {"key": "tenant.id", "value": {"stringValue": "dev"}},
                ]
            },
            "scopeMetrics": [
                {
                    "scope": {"name": "smoke-test"},
                    "metrics": [
                        {
                            "name": "smoke_gauge",
                            "gauge": {
                                "dataPoints": [
                                    {
                                        "asDouble": 1.0,
                                        "timeUnixNano": "1730000001000000000",
                                        "attributes": [
                                            {"key": "phase", "value": {"stringValue": "ingest"}}
                                        ],
                                    }
                                ]
                            },
                        }
                    ],
                }
            ],
        }
    ]
}

# One Info-level log record; exercises the logs pipeline.
LOG_PAYLOAD = {
    "resourceLogs": [
        {
            "resource": {
                "attributes": [
                    {"key": "service.name", "value": {"stringValue": "smoke-client"}},
                    {"key": "tenant.id", "value": {"stringValue": "dev"}},
                ]
            },
            "scopeLogs": [
                {
                    "scope": {"name": "smoke-test"},
                    "logRecords": [
                        {
                            "timeUnixNano": "1730000002000000000",
                            "severityNumber": 9,
                            "severityText": "Info",
                            "body": {"stringValue": "StellaOps collector smoke log"},
                        }
                    ],
                }
            ],
        }
    ]
}
|
||||
|
||||
|
||||
def _load_context(ca: Path, cert: Path, key: Path) -> ssl.SSLContext:
    """Build a mutual-TLS client context trusting *ca* and presenting *cert*/*key*."""
    ctx = ssl.create_default_context(cafile=str(ca))
    # Dev certificates carry fixed CNs/SANs, so hostname verification is
    # skipped while the peer certificate is still validated against the CA.
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_REQUIRED
    ctx.load_cert_chain(certfile=str(cert), keyfile=str(key))
    return ctx
|
||||
|
||||
|
||||
def _post_json(url: str, payload: dict, context: ssl.SSLContext) -> None:
    """POST *payload* as JSON to *url* over the given TLS context.

    Raises RuntimeError on a non-2xx status.
    """
    body = json.dumps(payload).encode("utf-8")
    headers = {
        "Content-Type": "application/json",
        "User-Agent": "stellaops-otel-smoke/1.0",
    }
    request = urllib.request.Request(url, data=body, headers=headers, method="POST")
    with urllib.request.urlopen(request, context=context, timeout=10) as response:
        status = response.status
        if status // 100 != 2:
            raise RuntimeError(f"{url} returned HTTP {status}")
|
||||
|
||||
|
||||
def _fetch_metrics(url: str, context: ssl.SSLContext) -> str:
    """Fetch the collector's Prometheus exposition page at *url* and return its text."""
    request = urllib.request.Request(
        url, headers={"User-Agent": "stellaops-otel-smoke/1.0"}
    )
    with urllib.request.urlopen(request, context=context, timeout=10) as response:
        raw = response.read()
    return raw.decode("utf-8")
|
||||
|
||||
|
||||
def _assert_counter(metrics: str, metric_name: str) -> None:
|
||||
for line in metrics.splitlines():
|
||||
if line.startswith(metric_name):
|
||||
try:
|
||||
_, value = line.split(" ")
|
||||
if float(value) > 0:
|
||||
return
|
||||
except ValueError:
|
||||
continue
|
||||
raise AssertionError(f"{metric_name} not incremented")
|
||||
|
||||
|
||||
def main() -> int:
    """Send sample OTLP traces/metrics/logs and verify receiver counters moved.

    Returns 0 on success, 1 when TLS material is missing. Network failures and
    unmet counter assertions propagate as exceptions.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--host", default="localhost", help="Collector host (default: %(default)s)")
    parser.add_argument("--otlp-port", type=int, default=4318, help="OTLP/HTTP port")
    parser.add_argument("--metrics-port", type=int, default=9464, help="Prometheus metrics port")
    parser.add_argument("--health-port", type=int, default=13133, help="Health check port")
    parser.add_argument("--ca", type=Path, default=Path("deploy/telemetry/certs/ca.crt"), help="CA certificate path")
    parser.add_argument("--cert", type=Path, default=Path("deploy/telemetry/certs/client.crt"), help="Client certificate path")
    parser.add_argument("--key", type=Path, default=Path("deploy/telemetry/certs/client.key"), help="Client key path")
    args = parser.parse_args()

    # Fail fast with a clear message if the dev TLS material was not generated.
    for path in (args.ca, args.cert, args.key):
        if not path.exists():
            print(f"[!] missing TLS material: {path}", file=sys.stderr)
            return 1

    context = _load_context(args.ca, args.cert, args.key)

    otlp_base = f"https://{args.host}:{args.otlp_port}/v1"
    print(f"[*] Sending OTLP traffic to {otlp_base}")
    _post_json(f"{otlp_base}/traces", TRACE_PAYLOAD, context)
    _post_json(f"{otlp_base}/metrics", METRIC_PAYLOAD, context)
    _post_json(f"{otlp_base}/logs", LOG_PAYLOAD, context)

    # Allow Prometheus exporter to update metrics
    time.sleep(2)

    metrics_url = f"https://{args.host}:{args.metrics_port}/metrics"
    print(f"[*] Fetching collector metrics from {metrics_url}")
    metrics = _fetch_metrics(metrics_url, context)

    # Receiver "accepted" counters only increase when payloads were ingested,
    # so a positive value per signal proves end-to-end acceptance.
    _assert_counter(metrics, "otelcol_receiver_accepted_spans")
    _assert_counter(metrics, "otelcol_receiver_accepted_logs")
    _assert_counter(metrics, "otelcol_receiver_accepted_metric_points")

    print("[✓] Collector accepted traces, logs, and metrics.")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
232
devops/telemetry/validation/tenant_isolation_smoke.py
Normal file
232
devops/telemetry/validation/tenant_isolation_smoke.py
Normal file
@@ -0,0 +1,232 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Tenant isolation smoke test for DEVOPS-OBS-50-002.
|
||||
|
||||
The script assumes the telemetry storage stack (Tempo + Loki) is running with
|
||||
mutual TLS enabled and enforces `X-Scope-OrgID` multi-tenancy. It performs the
|
||||
following checks:
|
||||
|
||||
1. Pushes a trace via the collector OTLP/HTTP endpoint and verifies it is
|
||||
retrievable from Tempo when using the matching tenant header, but not when
|
||||
querying as a different tenant.
|
||||
2. Pushes a log entry to Loki with a tenant header and verifies it is only
|
||||
visible to the matching tenant.
|
||||
|
||||
The goal is to provide a deterministic CI-friendly check that our storage
|
||||
configuration preserves tenant isolation guard rails before promoting bundles.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import ssl
|
||||
import sys
|
||||
import time
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _load_context(ca_file: Path, cert_file: Path, key_file: Path) -> ssl.SSLContext:
    """Client mTLS context: TLS >= 1.2, CA-pinned, hostname check disabled."""
    ctx = ssl.create_default_context(cafile=str(ca_file))
    ctx.minimum_version = ssl.TLSVersion.TLSv1_2
    # Dev certs carry fixed SANs; the peer is still verified against the CA.
    ctx.check_hostname = False
    ctx.load_cert_chain(certfile=str(cert_file), keyfile=str(key_file))
    return ctx
|
||||
|
||||
|
||||
def _post_json(url: str, payload: dict, context: ssl.SSLContext, headers: dict | None = None) -> None:
    """POST *payload* as compact JSON over mTLS; raise RuntimeError on non-2xx.

    Extra *headers* (e.g. X-Scope-OrgID) override the defaults on key clash.
    """
    merged_headers = {
        "Content-Type": "application/json",
        "User-Agent": "stellaops-tenant-smoke/1.0",
    }
    merged_headers.update(headers or {})
    request = urllib.request.Request(
        url,
        data=json.dumps(payload, separators=(",", ":")).encode("utf-8"),
        method="POST",
        headers=merged_headers,
    )
    with urllib.request.urlopen(request, context=context, timeout=10) as response:
        if response.status // 100 != 2:
            raise RuntimeError(f"POST {url} returned HTTP {response.status}")
|
||||
|
||||
|
||||
def _get(url: str, context: ssl.SSLContext, headers: dict | None = None) -> tuple[int, str]:
    """GET *url* over mTLS, returning ``(status, body)``.

    HTTP error responses are returned (not raised) so callers can assert on
    denial status codes like 401/403/404.
    """
    request = urllib.request.Request(
        url,
        method="GET",
        headers={"User-Agent": "stellaops-tenant-smoke/1.0", **(headers or {})},
    )
    try:
        with urllib.request.urlopen(request, context=context, timeout=10) as response:
            payload = response.read().decode("utf-8")
            return response.status, payload
    except urllib.error.HTTPError as exc:  # type: ignore[attr-defined]
        body = exc.read().decode("utf-8") if exc.fp else ""
        return exc.code, body
|
||||
|
||||
|
||||
def _payload_trace(trace_id: str, tenant: str) -> dict:
|
||||
return {
|
||||
"resourceSpans": [
|
||||
{
|
||||
"resource": {
|
||||
"attributes": [
|
||||
{"key": "service.name", "value": {"stringValue": "tenant-smoke"}},
|
||||
{"key": "tenant.id", "value": {"stringValue": tenant}},
|
||||
]
|
||||
},
|
||||
"scopeSpans": [
|
||||
{
|
||||
"scope": {"name": "tenant-smoke"},
|
||||
"spans": [
|
||||
{
|
||||
"traceId": trace_id,
|
||||
"spanId": "0000000000000001",
|
||||
"name": "tenant-check",
|
||||
"kind": 1,
|
||||
"startTimeUnixNano": "1730500000000000000",
|
||||
"endTimeUnixNano": "1730500000500000000",
|
||||
"status": {"code": 0},
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def _payload_log(ts_ns: int, tenant: str, marker: str) -> dict:
|
||||
return {
|
||||
"resourceLogs": [
|
||||
{
|
||||
"resource": {
|
||||
"attributes": [
|
||||
{"key": "service.name", "value": {"stringValue": "tenant-smoke"}},
|
||||
{"key": "tenant.id", "value": {"stringValue": tenant}},
|
||||
]
|
||||
},
|
||||
"scopeLogs": [
|
||||
{
|
||||
"scope": {"name": "tenant-smoke"},
|
||||
"logRecords": [
|
||||
{
|
||||
"timeUnixNano": str(ts_ns),
|
||||
"severityNumber": 9,
|
||||
"severityText": "Info",
|
||||
"body": {"stringValue": f"tenant={tenant} marker={marker}"},
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def _assert_tenant_access(
    tempo_url: str,
    loki_url: str,
    collector_url: str,
    tenant: str,
    other_tenant: str,
    context: ssl.SSLContext,
) -> None:
    """Push a trace and a log as *tenant*, then verify Tempo/Loki serve them
    only to that tenant and deny *other_tenant*.

    Raises AssertionError on any isolation violation.
    """
    # OTLP trace IDs are 16 bytes, i.e. exactly 32 hex characters (the sibling
    # smoke script uses 32-char IDs too). The previous 48-character value
    # (uuid4().hex + uuid4().hex[:16]) was invalid and could be rejected by the
    # collector or be unqueryable via Tempo's /api/traces endpoint.
    trace_id = uuid.uuid4().hex
    trace_payload = _payload_trace(trace_id, tenant)
    _post_json(f"{collector_url}/traces", trace_payload, context)

    log_marker = uuid.uuid4().hex[:12]
    timestamp_ns = int(time.time() * 1_000_000_000)
    log_payload = _payload_log(timestamp_ns, tenant, log_marker)
    _post_json(f"{collector_url}/logs", log_payload, context)

    # Allow background processing to flush to storage.
    time.sleep(2)

    tempo_headers = {"X-Scope-OrgID": tenant}
    tempo_status, tempo_body = _get(f"{tempo_url}/api/traces/{trace_id}", context, headers=tempo_headers)
    if tempo_status != 200:
        raise AssertionError(f"Tempo returned HTTP {tempo_status} for tenant {tenant}: {tempo_body}")
    if trace_id not in tempo_body:
        raise AssertionError("Tempo response missing expected trace data")

    # The wrong tenant must not see the trace; any auth-failure / not-found
    # status counts as a denial.
    other_status, _ = _get(
        f"{tempo_url}/api/traces/{trace_id}", context, headers={"X-Scope-OrgID": other_tenant}
    )
    if other_status not in (401, 403, 404):
        raise AssertionError(
            f"Tempo should deny tenant {other_tenant}, received status {other_status}"
        )

    # NOTE(review): the stream selector assumes the collector exposes the
    # service name as the Loki label `app` — confirm against the pipeline config.
    log_query = urllib.parse.urlencode({"query": "{app=\"tenant-smoke\"}"})
    loki_status, loki_body = _get(
        f"{loki_url}/loki/api/v1/query?{log_query}", context, headers={"X-Scope-OrgID": tenant}
    )
    if loki_status != 200:
        raise AssertionError(f"Loki returned HTTP {loki_status} for tenant {tenant}: {loki_body}")
    if log_marker not in loki_body:
        raise AssertionError("Loki response missing expected log entry")

    # Loki may answer 200 with an empty result set for an unknown tenant, so
    # accept 200 only when the marker is absent from the body.
    other_log_status, other_log_body = _get(
        f"{loki_url}/loki/api/v1/query?{log_query}",
        context,
        headers={"X-Scope-OrgID": other_tenant},
    )
    if other_log_status == 200 and log_marker in other_log_body:
        raise AssertionError("Loki returned tenant data to the wrong org")
    if other_log_status not in (200, 401, 403):
        raise AssertionError(
            f"Unexpected Loki status when querying as {other_tenant}: {other_log_status}"
        )
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: run the tenant-isolation checks against a live stack.

    Returns 0 on success, 1 when TLS material is missing. Isolation failures
    surface as AssertionError from _assert_tenant_access.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--collector", default="https://localhost:4318/v1", help="Collector OTLP base URL")
    parser.add_argument("--tempo", default="https://localhost:3200", help="Tempo base URL")
    parser.add_argument("--loki", default="https://localhost:3100", help="Loki base URL")
    parser.add_argument("--tenant", default="dev", help="Primary tenant ID to test")
    parser.add_argument("--other-tenant", default="stage", help="Secondary tenant expected to be denied")
    parser.add_argument("--ca", type=Path, default=Path("deploy/telemetry/certs/ca.crt"), help="CA certificate path")
    parser.add_argument(
        "--cert", type=Path, default=Path("deploy/telemetry/certs/client.crt"), help="mTLS client certificate"
    )
    parser.add_argument(
        "--key", type=Path, default=Path("deploy/telemetry/certs/client.key"), help="mTLS client key"
    )
    args = parser.parse_args()

    # Fail fast when the dev TLS material has not been generated yet.
    for path in (args.ca, args.cert, args.key):
        if not path.exists():
            print(f"[!] missing TLS material: {path}", file=sys.stderr)
            return 1

    context = _load_context(args.ca, args.cert, args.key)

    # Normalise base URLs so the path joins below never produce double slashes.
    collector_base = args.collector.rstrip("/")
    tempo_base = args.tempo.rstrip("/")
    loki_base = args.loki.rstrip("/")

    print(f"[*] Validating tenant isolation using tenant={args.tenant} and other={args.other_tenant}")
    _assert_tenant_access(
        tempo_base,
        loki_base,
        collector_base,
        tenant=args.tenant,
        other_tenant=args.other_tenant,
        context=context,
    )

    print("[✓] Tempo and Loki enforce tenant isolation with mTLS + scoped headers.")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
7
devops/telemetry/validation/tests/ci-run.sh
Normal file
7
devops/telemetry/validation/tests/ci-run.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env bash
# CI entry point: run the config schema test, then verify the sample bundle.
# Exit codes bubble up from the invoked scripts (see the validation README).
set -euo pipefail

# Resolve all paths relative to this script so the suite works from any CWD.
# The previous version pointed at the pre-consolidation ops/devops/telemetry/
# layout, which no longer matches this file's location.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
VALIDATION_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
REPO_ROOT="$(cd "$VALIDATION_DIR/../../.." && pwd)"
SCHEMA="$REPO_ROOT/docs/modules/telemetry/schemas/telemetry-bundle.schema.json"

"$SCRIPT_DIR/run-schema-tests.sh"
TELEMETRY_BUNDLE_SCHEMA="$SCHEMA" "$VALIDATION_DIR/verify-telemetry-bundle.sh" "$SCRIPT_DIR/telemetry-bundle.tar"
|
||||
35
devops/telemetry/validation/tests/config-valid.json
Normal file
35
devops/telemetry/validation/tests/config-valid.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"hashAlgorithm": "sha256",
|
||||
"profiles": [
|
||||
{
|
||||
"name": "default",
|
||||
"description": "default profile",
|
||||
"collectorVersion": "otelcol/1.0.0",
|
||||
"cryptoProfile": "fips",
|
||||
"sealedMode": false,
|
||||
"allowlistedEndpoints": ["http://localhost:4318"],
|
||||
"exporters": [
|
||||
{
|
||||
"type": "otlp",
|
||||
"endpoint": "http://localhost:4318",
|
||||
"protocol": "http",
|
||||
"compression": "none",
|
||||
"enabled": true
|
||||
}
|
||||
],
|
||||
"redactionPolicyUri": "https://example.com/redaction-policy.json",
|
||||
"sampling": {
|
||||
"strategy": "traceidratio",
|
||||
"seed": "0000000000000001",
|
||||
"rules": [
|
||||
{"match": "service.name == 'api'", "priority": 10, "sampleRate": 0.2}
|
||||
]
|
||||
},
|
||||
"tenantRouting": {
|
||||
"attribute": "tenant.id",
|
||||
"quotasPerTenant": {"tenant-a": 1000}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
9
devops/telemetry/validation/tests/make-sample.sh
Normal file
9
devops/telemetry/validation/tests/make-sample.sh
Normal file
@@ -0,0 +1,9 @@
|
||||
#!/usr/bin/env bash
# Rebuild the deterministic sample telemetry bundle consumed by the verifier tests.
set -euo pipefail
# ROOT resolves to the validation/ directory (one level above tests/).
ROOT="$(cd "$(dirname "$0")/../" && pwd)"
BUNDLE_DIR="$ROOT/tests/sample-bundle"
mkdir -p "$BUNDLE_DIR"
cp "$ROOT/tests/manifest-valid.json" "$BUNDLE_DIR/telemetry-bundle.json"
# Generate the checksum list from inside the bundle dir so entries use bare
# (relative) filenames, matching what the verifier expects after extraction.
(cd "$BUNDLE_DIR" && sha256sum telemetry-bundle.json > telemetry-bundle.sha256)
# Deterministic tar: epoch mtime, numeric root ownership, ustar format —
# byte-identical output on every run (see the README's flag list).
tar --mtime=@0 --owner=0 --group=0 --numeric-owner --format=ustar -C "$BUNDLE_DIR" -cf "$ROOT/tests/telemetry-bundle.tar" telemetry-bundle.json telemetry-bundle.sha256
echo "Wrote sample bundle to $ROOT/tests/telemetry-bundle.tar"
|
||||
26
devops/telemetry/validation/tests/manifest-valid.json
Normal file
26
devops/telemetry/validation/tests/manifest-valid.json
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"bundleId": "00000000-0000-0000-0000-000000000001",
|
||||
"createdAt": "2025-12-01T00:00:00Z",
|
||||
"profileHash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
|
||||
"collectorVersion": "otelcol/1.0.0",
|
||||
"sealedMode": true,
|
||||
"redactionManifest": "redaction-manifest.json",
|
||||
"manifestHashAlgorithm": "sha256",
|
||||
"timeAnchor": {
|
||||
"type": "rfc3161",
|
||||
"value": "dummy-token"
|
||||
},
|
||||
"artifacts": [
|
||||
{
|
||||
"path": "logs.ndjson",
|
||||
"sha256": "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
|
||||
"mediaType": "application/x-ndjson",
|
||||
"size": 123
|
||||
}
|
||||
],
|
||||
"dsseEnvelope": {
|
||||
"hash": "cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc",
|
||||
"location": "bundle.dsse.json"
|
||||
}
|
||||
}
|
||||
19
devops/telemetry/validation/tests/run-schema-tests.sh
Normal file
19
devops/telemetry/validation/tests/run-schema-tests.sh
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash
# Validate the sample telemetry config against the telemetry-config JSON schema.
# Exit 127 when python or the jsonschema module is unavailable.
set -euo pipefail

# Resolve paths relative to this script so the test works from any CWD. The
# previous version hardcoded the pre-consolidation ops/devops/telemetry layout
# and only worked when run from the repo root.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../../.." && pwd)"

if ! command -v python >/dev/null 2>&1; then
  echo "python not found" >&2
  exit 127
fi

# NOTE: the original combined `if ...; then` with a second stray `then` after
# the heredoc, which is a bash syntax error; probe the module with -c instead.
if ! python -c 'import jsonschema' >/dev/null 2>&1; then
  echo "python jsonschema module not installed" >&2
  exit 127
fi

TESTS_DIR="$SCRIPT_DIR" REPO_ROOT="$REPO_ROOT" python - <<'PY'
import json
import os
import pathlib

from jsonschema import validate

tests = pathlib.Path(os.environ["TESTS_DIR"])
repo = pathlib.Path(os.environ["REPO_ROOT"])
config = json.loads((tests / "config-valid.json").read_text())
schema = json.loads((repo / "docs/modules/telemetry/schemas/telemetry-config.schema.json").read_text())
validate(config, schema)
print('telemetry-config schema ok')
PY
|
||||
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"bundleId": "00000000-0000-0000-0000-000000000001",
|
||||
"createdAt": "2025-12-01T00:00:00Z",
|
||||
"profileHash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
|
||||
"collectorVersion": "otelcol/1.0.0",
|
||||
"sealedMode": true,
|
||||
"redactionManifest": "redaction-manifest.json",
|
||||
"manifestHashAlgorithm": "sha256",
|
||||
"timeAnchor": {
|
||||
"type": "rfc3161",
|
||||
"value": "dummy-token"
|
||||
},
|
||||
"artifacts": [
|
||||
{
|
||||
"path": "logs.ndjson",
|
||||
"sha256": "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
|
||||
"mediaType": "application/x-ndjson",
|
||||
"size": 123
|
||||
}
|
||||
],
|
||||
"dsseEnvelope": {
|
||||
"hash": "cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc",
|
||||
"location": "bundle.dsse.json"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
6e3fedbf183aece5dfa14a90ebce955e2887d36747c424e628dc2cc03bcb0ed3 telemetry-bundle.json
|
||||
@@ -0,0 +1 @@
|
||||
6e3fedbf183aece5dfa14a90ebce955e2887d36747c424e628dc2cc03bcb0ed3 ops/devops/telemetry/tests/manifest-valid.json
|
||||
BIN
devops/telemetry/validation/tests/telemetry-bundle.tar
Normal file
BIN
devops/telemetry/validation/tests/telemetry-bundle.tar
Normal file
Binary file not shown.
83
devops/telemetry/validation/validate_storage_stack.py
Normal file
83
devops/telemetry/validation/validate_storage_stack.py
Normal file
@@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Static validation for the telemetry storage stack configuration.
|
||||
|
||||
Checks the Prometheus, Tempo, and Loki configuration snippets to ensure:
|
||||
- mutual TLS is enabled end-to-end
|
||||
- tenant override files are referenced
|
||||
- multitenancy flags are set
|
||||
- retention/limit defaults exist for __default__ tenant entries
|
||||
|
||||
This script is intended to back `DEVOPS-OBS-50-002` and can run in CI
|
||||
before publishing bundles or rolling out staging updates.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Repository root, assuming this file lives at devops/telemetry/validation/.
REPO_ROOT = Path(__file__).resolve().parents[3]
# Storage-stack configuration files inspected by the validators below.
PROMETHEUS_PATH = REPO_ROOT / "deploy/telemetry/storage/prometheus.yaml"
TEMPO_PATH = REPO_ROOT / "deploy/telemetry/storage/tempo.yaml"
LOKI_PATH = REPO_ROOT / "deploy/telemetry/storage/loki.yaml"
# Per-tenant override files that must define __default__ guard rails.
TEMPO_OVERRIDES_PATH = REPO_ROOT / "deploy/telemetry/storage/tenants/tempo-overrides.yaml"
LOKI_OVERRIDES_PATH = REPO_ROOT / "deploy/telemetry/storage/tenants/loki-overrides.yaml"
|
||||
|
||||
|
||||
def read(path: Path) -> str:
    """Return *path*'s UTF-8 text; raise FileNotFoundError with context when absent."""
    if path.exists():
        return path.read_text(encoding="utf-8")
    raise FileNotFoundError(f"Required configuration file missing: {path}")
|
||||
|
||||
|
||||
def assert_contains(haystack: str, needle: str, path: Path) -> None:
    """Raise AssertionError naming *path* when *needle* is absent from *haystack*."""
    if needle in haystack:
        return
    raise AssertionError(f"{path} is missing required snippet: {needle!r}")
|
||||
|
||||
|
||||
def validate_prometheus() -> None:
    """Check the Prometheus config carries mTLS material and token-based auth.

    Raises AssertionError (missing snippet) or FileNotFoundError (missing file).
    """
    config = read(PROMETHEUS_PATH)
    required_snippets = (
        "tls_config:",
        "ca_file:",
        "cert_file:",
        "key_file:",
        "authorization:",
        "credentials_file:",
    )
    for snippet in required_snippets:
        assert_contains(config, snippet, PROMETHEUS_PATH)
|
||||
|
||||
|
||||
def validate_tempo() -> None:
    """Check Tempo enforces multi-tenancy, client certs, and tenant overrides.

    Validates both the main Tempo config and its per-tenant override file.
    Raises AssertionError (missing snippet) or FileNotFoundError (missing file).
    """
    config = read(TEMPO_PATH)
    for snippet in (
        "multitenancy_enabled: true",
        "require_client_cert: true",
        "per_tenant_override_config",
    ):
        assert_contains(config, snippet, TEMPO_PATH)

    tenant_limits = read(TEMPO_OVERRIDES_PATH)
    for snippet in (
        "__default__",
        "traces_per_second_limit",
        "max_bytes_per_trace",
    ):
        assert_contains(tenant_limits, snippet, TEMPO_OVERRIDES_PATH)
|
||||
|
||||
|
||||
def validate_loki() -> None:
    """Check Loki enables tenant auth and defines default retention overrides.

    Raises AssertionError (missing snippet) or FileNotFoundError (missing file).
    """
    config = read(LOKI_PATH)
    for snippet in ("auth_enabled: true", "per_tenant_override_config"):
        assert_contains(config, snippet, LOKI_PATH)

    tenant_limits = read(LOKI_OVERRIDES_PATH)
    for snippet in ("__default__", "retention_period"):
        assert_contains(tenant_limits, snippet, LOKI_OVERRIDES_PATH)
|
||||
|
||||
|
||||
def main() -> int:
    """Run every storage-stack validator; return 0 on success, 1 on failure.

    A failure (missing file or missing snippet) is reported to stderr rather
    than raised, so this works cleanly as a CI exit status.
    """
    validators = (validate_prometheus, validate_tempo, validate_loki)
    try:
        for validate in validators:
            validate()
    except (AssertionError, FileNotFoundError) as exc:
        print(f"[❌] telemetry storage validation failed: {exc}", file=sys.stderr)
        return 1

    print("[✓] telemetry storage configuration meets multi-tenant guard rails.")
    return 0
|
||||
|
||||
|
||||
# Script entry point: process exit status mirrors main()'s return value.
if __name__ == "__main__":
    sys.exit(main())
|
||||
76
devops/telemetry/validation/verify-telemetry-bundle.sh
Normal file
76
devops/telemetry/validation/verify-telemetry-bundle.sh
Normal file
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Minimal offline verifier for telemetry bundles (v1).
#
# Usage:
#   verify-telemetry-bundle.sh path/to/telemetry-bundle.tar
# Optional:
#   TELEMETRY_BUNDLE_SCHEMA=/abs/path/to/telemetry-bundle.schema.json
#
# Exits:
#   0  success
#  21  checksum/manifest missing
#  22  checksum mismatch
#  23  schema validation failed
#  64  usage error

BUNDLE=${1:-}
SCHEMA_PATH=${TELEMETRY_BUNDLE_SCHEMA:-}

if [[ -z "$BUNDLE" ]]; then
  echo "Usage: $0 path/to/telemetry-bundle.tar" >&2
  echo "Optional: set TELEMETRY_BUNDLE_SCHEMA=/abs/path/to/telemetry-bundle.schema.json" >&2
  exit 64
fi

# Extract into a throwaway directory that is removed on any exit path.
WORKDIR=$(mktemp -d)
cleanup() { rm -rf "$WORKDIR"; }
trap cleanup EXIT

tar --extract --file "$BUNDLE" --directory "$WORKDIR"

MANIFEST="$WORKDIR/telemetry-bundle.json"
HASHES="$WORKDIR/telemetry-bundle.sha256"

if [[ ! -f "$MANIFEST" || ! -f "$HASHES" ]]; then
  echo "Missing manifest or checksum file." >&2
  exit 21
fi

# Verify checksums relative to the bundle root (paths in the .sha256 list
# are bundle-relative, hence the pushd).
pushd "$WORKDIR" >/dev/null
if ! sha256sum --quiet --check telemetry-bundle.sha256; then
  echo "Checksum mismatch." >&2
  exit 22
fi
popd >/dev/null

# JSON schema validation (optional if python/jsonschema not present).
if command -v python >/dev/null 2>&1; then
  SCHEMA_FILE="$SCHEMA_PATH"
  if [[ -z "$SCHEMA_FILE" ]]; then
    # FIX: `$(cd dir)` prints nothing, so the previous `$(cd … || echo "")`
    # always produced an empty SCHEMA_DIR and the default lookup never worked.
    # `cd … && pwd` captures the resolved directory.
    SCHEMA_DIR="$(cd "$(dirname "$0")/../../docs/modules/telemetry/schemas" 2>/dev/null && pwd || echo "")"
    SCHEMA_FILE="$SCHEMA_DIR/telemetry-bundle.schema.json"
  fi

  if [[ -n "$SCHEMA_FILE" && -f "$SCHEMA_FILE" ]]; then
    # FIX: under `set -e`, a failing `python` aborted the script before the
    # old `if [[ $? -ne 0 ]]` check ran, making exit code 23 unreachable.
    # Testing the command directly keeps the documented exit code.
    if ! python - "$MANIFEST" "$SCHEMA_FILE" <<'PY'
import json, sys
from jsonschema import validate, Draft202012Validator

manifest_path = sys.argv[1]
schema_path = sys.argv[2]
with open(manifest_path, 'r', encoding='utf-8') as f:
    manifest = json.load(f)
with open(schema_path, 'r', encoding='utf-8') as f:
    schema = json.load(f)
Draft202012Validator.check_schema(schema)
validate(manifest, schema)
PY
    then
      echo "Schema validation failed." >&2
      exit 23
    fi
  else
    echo "Schema file not found ($SCHEMA_FILE); skipping validation." >&2
  fi
else
  echo "jsonschema validation skipped (requires python + jsonschema)." >&2
fi

echo "Telemetry bundle verified." >&2
exit 0
|
||||
Reference in New Issue
Block a user