Compare commits
398 Commits
18f28168f0
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6af4e16d7c | ||
|
|
9a4cd2e0f7 | ||
|
|
335ff7da16 | ||
|
|
32f9581aa7 | ||
|
|
75de089ee8 | ||
|
|
b4fc66feb6 | ||
|
|
f10d83c444 | ||
|
|
c786faae84 | ||
|
|
a866eb6277 | ||
|
|
d2ac60c0e6 | ||
|
|
07198f9453 | ||
|
|
41f3ac7aba | ||
|
|
81e4d76fb8 | ||
|
|
907783f625 | ||
|
|
c8f3120174 | ||
|
|
7792749bb4 | ||
|
|
22390057fc | ||
|
|
ebce1c80b1 | ||
|
|
e95eff2542 | ||
|
|
e59b5e257c | ||
|
|
4f6dd4de83 | ||
|
|
fb17937958 | ||
|
|
e0ec5261de | ||
|
|
39359da171 | ||
|
|
17613acf57 | ||
|
|
ed3079543c | ||
|
|
aa70af062e | ||
|
|
d71853ad7e | ||
|
|
ad7fbc47a1 | ||
|
|
702c3106a8 | ||
|
|
4dfa1b8e05 | ||
|
|
b8b2d83f4a | ||
|
|
ef6ac36323 | ||
|
|
0103defcff | ||
|
|
82a49f6743 | ||
|
|
2a06f780cf | ||
|
|
223843f1d1 | ||
|
|
deb82b4f03 | ||
|
|
b9f71fc7e9 | ||
|
|
43e2af88f6 | ||
|
|
4231305fec | ||
|
|
8197588e74 | ||
|
|
2c2bbf1005 | ||
|
|
5540ce9430 | ||
|
|
40362de568 | ||
|
|
02772c7a27 | ||
|
|
9a08d10b89 | ||
|
|
7503c19b8f | ||
|
|
e59921374e | ||
|
|
491e883653 | ||
|
|
5590a99a1a | ||
|
|
7ac70ece71 | ||
|
|
dac8e10e36 | ||
|
|
b444284be5 | ||
|
|
fda92af9bc | ||
|
|
fcb5ffe25d | ||
|
|
84d97fd22c | ||
|
|
ef933db0d8 | ||
|
|
c8a871dd30 | ||
|
|
396e9b75a4 | ||
|
|
21337f4de6 | ||
|
|
541a936d03 | ||
|
|
342c35f8ce | ||
|
|
56e2dc01ee | ||
|
|
7e384ab610 | ||
|
|
e47627cfff | ||
|
|
5146204f1b | ||
|
|
3ba7157b00 | ||
|
|
4602ccc3a3 | ||
|
|
0536a4f7d4 | ||
|
|
dfaa2079aa | ||
|
|
00bc4f79dd | ||
|
|
634233dfed | ||
|
|
df94136727 | ||
|
|
aff0ceb2fe | ||
|
|
9a1572e11e | ||
| 53503cb407 | |||
| 5d398ec442 | |||
|
|
292a6e94e8 | ||
|
|
22d67f203f | ||
|
|
f897808c54 | ||
|
|
1e0e61659f | ||
|
|
01a2a2dc16 | ||
|
|
a216d7eea4 | ||
|
|
8a4edee665 | ||
|
|
2e98f6f3b2 | ||
|
|
14746936a9 | ||
|
|
94ea6c5e88 | ||
|
|
ba2f015184 | ||
|
|
b9c288782b | ||
|
|
b7b27c8740 | ||
|
|
6928124d33 | ||
|
|
d55a353481 | ||
|
|
ad193449a7 | ||
|
|
2595094bb7 | ||
|
|
80b8254763 | ||
|
|
4b3db9ca85 | ||
|
|
09c7155f1b | ||
|
|
da315965ff | ||
|
|
efe9bd8cfe | ||
|
|
3c6e14fca5 | ||
|
|
3698ebf4a8 | ||
|
|
ce8cdcd23d | ||
|
|
0ada1b583f | ||
|
|
439f10966b | ||
|
|
5fc469ad98 | ||
|
|
edc91ea96f | ||
|
|
5b57b04484 | ||
|
|
91f3610b9d | ||
|
|
8779e9226f | ||
|
|
951a38d561 | ||
|
|
43882078a4 | ||
|
|
2eafe98d44 | ||
|
|
6410a6d082 | ||
|
|
f85d53888c | ||
|
|
1fcf550d3a | ||
|
|
0dc71e760a | ||
|
|
811f35cba7 | ||
|
|
00d2c99af9 | ||
|
|
7d5250238c | ||
|
|
28823a8960 | ||
|
|
b4235c134c | ||
| dee252940b | |||
|
|
8bbfe4d2d2 | ||
|
|
394b57f6bf | ||
|
|
3a2100aa78 | ||
|
|
417ef83202 | ||
|
|
2170a58734 | ||
|
|
415eff1207 | ||
|
|
b55d9fa68d | ||
|
|
5a480a3c2a | ||
|
|
4391f35d8a | ||
|
|
b1f40945b7 | ||
|
|
41864227d2 | ||
|
|
8137503221 | ||
|
|
08dab053c0 | ||
|
|
7ce83270d0 | ||
|
|
505fe7a885 | ||
|
|
0cb5c9abfb | ||
|
|
d59cc816c1 | ||
|
|
8c8f0c632d | ||
|
|
4344020dd1 | ||
|
|
b058dbe031 | ||
|
|
3411e825cd | ||
|
|
9202cd7da8 | ||
|
|
00c41790f4 | ||
|
|
2e70c9fdb6 | ||
| d233fa3529 | |||
|
|
e2e404e705 | ||
| 01f4943ab9 | |||
|
|
233873f620 | ||
|
|
f1a39c4ce3 | ||
|
|
6e45066e37 | ||
|
|
e00f6365da | ||
|
|
999e26a48e | ||
| d776e93b16 | |||
|
|
564df71bfb | ||
|
|
e1f1bef4c1 | ||
|
|
3f3473ee3a | ||
|
|
efaf3cb789 | ||
|
|
ce5ec9c158 | ||
|
|
ab22181e8b | ||
|
|
1995883476 | ||
|
|
0987cd6ac8 | ||
|
|
b83aa1aa0b | ||
|
|
ce1f282ce0 | ||
|
|
b8b493913a | ||
|
|
49922dff5a | ||
|
|
92bc4d3a07 | ||
|
|
0ad4777259 | ||
|
|
2bd189387e | ||
|
|
3a92c77a04 | ||
|
|
b7059d523e | ||
|
|
96e5646977 | ||
|
|
a3c7fe5e88 | ||
|
|
199aaf74d8 | ||
|
|
f30805ad7f | ||
|
|
689c656f20 | ||
|
|
108d1c64b3 | ||
|
|
bc0762e97d | ||
|
|
3d01bf9edc | ||
|
|
68bc53a07b | ||
|
|
4b124fb056 | ||
|
|
7c24ed96ee | ||
|
|
11597679ed | ||
|
|
e3f28a21ab | ||
|
|
a403979177 | ||
|
|
b8641b1959 | ||
|
|
98e6b76584 | ||
|
|
862bb6ed80 | ||
|
|
bd2529502e | ||
|
|
965cbf9574 | ||
|
|
af30fc322f | ||
|
|
e53a282fbe | ||
|
|
d907729778 | ||
|
|
8a72779c16 | ||
|
|
e0f6efecce | ||
|
|
98934170ca | ||
|
|
69651212ec | ||
|
|
53889d85e7 | ||
|
|
0de92144d2 | ||
|
|
9bd6a73926 | ||
|
|
4042fc2184 | ||
|
|
dd0067ea0b | ||
|
|
f6c22854a4 | ||
|
|
05597616d6 | ||
|
|
a6f1406509 | ||
|
|
0a8f8c14af | ||
|
|
7efee7dd41 | ||
|
|
952ba77924 | ||
|
|
23e463e346 | ||
|
|
849a70f9d1 | ||
|
|
868f8e0bb6 | ||
|
|
84c42ca2d8 | ||
|
|
efd6850c38 | ||
|
|
2b892ad1b2 | ||
|
|
e16d2b5224 | ||
|
|
5e514532df | ||
|
|
2141196496 | ||
|
|
bca02ec295 | ||
|
|
8cabdce3b6 | ||
|
|
6145d89468 | ||
|
|
ee317d3f61 | ||
|
|
4cc8bdb460 | ||
|
|
95ff83e0f0 | ||
|
|
3954615e81 | ||
|
|
8948b1a3e2 | ||
|
|
5cfcf0723a | ||
|
|
ba733b9f69 | ||
|
|
79d562ea5d | ||
|
|
a7cd10020a | ||
|
|
b978ae399f | ||
|
|
570746b7d9 | ||
|
|
8318b26370 | ||
|
|
1f76650b7e | ||
|
|
37304cf819 | ||
|
|
6beb9d7c4e | ||
|
|
be8c623e04 | ||
|
|
dd4bb50076 | ||
|
|
bf6ab6ba6f | ||
|
|
02849cc955 | ||
|
|
2eaf0f699b | ||
|
|
6c1177a6ce | ||
|
|
582a88e8f8 | ||
|
|
f0662dd45f | ||
|
|
43c281a8b2 | ||
| 91550196fe | |||
| e8eacde73e | |||
| 5d7c687a77 | |||
| ffa219cfeb | |||
|
|
579236bfce | ||
|
|
18d87c64c5 | ||
|
|
347c88342c | ||
|
|
cc69d332e3 | ||
|
|
53508ceccb | ||
|
|
6a299d231f | ||
|
|
635c70e828 | ||
|
|
0de3c8a3f0 | ||
|
|
175b750e29 | ||
|
|
8768c27f30 | ||
|
|
b018949a8d | ||
|
|
f214edff82 | ||
|
|
75f6942769 | ||
|
|
600f3a7a3c | ||
|
|
4dc7cf834a | ||
|
|
e1262eb916 | ||
|
|
2d079d61ed | ||
|
|
e0b585c799 | ||
|
|
de53785176 | ||
|
|
ca91f40051 | ||
|
|
35c8f9216f | ||
|
|
e923880694 | ||
|
|
37cba83708 | ||
|
|
ea1d58a89b | ||
|
|
47168fec38 | ||
|
|
6d049905c7 | ||
|
|
acbb0ff637 | ||
|
|
d785a9095f | ||
|
|
0c9e8d5d18 | ||
|
|
76ecea482e | ||
|
|
2d08f52715 | ||
|
|
885ce86af4 | ||
|
|
44171930ff | ||
|
|
909d9b6220 | ||
|
|
790801f329 | ||
|
|
c11d87d252 | ||
|
|
7df0677e34 | ||
|
|
b39eb34226 | ||
|
|
808ab87b21 | ||
|
|
25254e3831 | ||
|
|
0bef705bcc | ||
|
|
71e9a56cfd | ||
|
|
17d45a6d30 | ||
|
|
8f54ffa203 | ||
|
|
3488b22c0c | ||
|
|
7e7be4d2fd | ||
|
|
887b0a1c67 | ||
|
|
a4c4fda2a1 | ||
|
|
b34f13dc03 | ||
| 39d0ef6728 | |||
|
|
2548abc56f | ||
| b3656e5cb7 | |||
|
|
d040c001ac | ||
|
|
d1cbb905f8 | ||
|
|
05da719048 | ||
|
|
1c6730a1d2 | ||
|
|
3b96b2e3ea | ||
|
|
ef6e4b2067 | ||
|
|
8abbf9574d | ||
|
|
cfa2274d31 | ||
|
|
4c55b01222 | ||
|
|
e950474a77 | ||
|
|
e901d31acf | ||
|
|
c34fb7256d | ||
|
|
ea970ead2a | ||
|
|
d63af51f84 | ||
|
|
4831c7fcb0 | ||
|
|
1c782897f7 | ||
|
|
56e2f64d07 | ||
|
|
9f6e6f7fb3 | ||
|
|
6bee1fdcf5 | ||
|
|
d92973d6fd | ||
|
|
17826bdca1 | ||
|
|
7c39058386 | ||
|
|
46c8c47d06 | ||
|
|
e6119cbe91 | ||
|
|
150b3730ef | ||
|
|
5970f0d9bd | ||
|
|
bb709b643e | ||
|
|
0d4a986b7b | ||
|
|
7514eee949 | ||
|
|
029002ad05 | ||
| 2de8d1784b | |||
|
|
c13355923f | ||
|
|
fc99092dec | ||
|
|
c3ce1ebc25 | ||
|
|
7768555f2d | ||
|
|
cce96f3596 | ||
|
|
f47d2d1377 | ||
|
|
8d78dd219b | ||
|
|
2e89a92d92 | ||
|
|
48702191be | ||
|
|
ca09400069 | ||
|
|
dc7c75b496 | ||
|
|
967ae0ab16 | ||
|
|
b6b9ffc050 | ||
|
|
a7f3c7869a | ||
|
|
96352c9d27 | ||
|
|
f43e828b4e | ||
|
|
cbdc05b24d | ||
|
|
d519782a8f | ||
|
|
ca35db9ef4 | ||
|
|
79b8e53441 | ||
|
|
65b1599229 | ||
| 522fff73cd | |||
|
|
8ac994ed37 | ||
|
|
2e276d6676 | ||
|
|
f0e74d2ee8 | ||
|
|
10212d67c0 | ||
|
|
616ec73133 | ||
|
|
33c7e77273 | ||
|
|
e91da22836 | ||
|
|
d3ecd7f8e6 | ||
|
|
77cee6a209 | ||
|
|
8355e2ff75 | ||
|
|
e69b57d467 | ||
|
|
9075bad2d9 | ||
|
|
d3128aec24 | ||
|
|
833e68575a | ||
|
|
7b01c7d6ac | ||
|
|
08b27b8a26 | ||
| a3db0c959d | |||
|
|
13e4b53dda | ||
|
|
d09ebd0b64 | ||
|
|
61f963fd52 | ||
|
|
151f6b35cc | ||
|
|
7040984215 | ||
|
|
4a557ceb55 | ||
|
|
aec4336254 | ||
|
|
86be324fc0 | ||
|
|
babb81af52 | ||
|
|
0e8655cbb1 | ||
|
|
c2c6b58b41 | ||
|
|
b059bc7675 | ||
|
|
56c687253f | ||
|
|
69c59defdc | ||
|
|
9df52d84aa | ||
|
|
cef4cb2c5a | ||
|
|
75c2bcafce | ||
|
|
ba4c935182 | ||
|
|
d71c81e45d | ||
|
|
ae69b1a8a1 | ||
|
|
536f6249a6 | ||
|
|
515975edc5 | ||
|
|
a1ce3f74fa | ||
|
|
e5ffcd6535 | ||
|
|
b190563d80 | ||
|
|
62086949a4 |
37
.claude/settings.local.json
Normal file
37
.claude/settings.local.json
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
"permissions": {
|
||||||
|
"allow": [
|
||||||
|
"Bash(dotnet --list-sdks:*)",
|
||||||
|
"Bash(winget install:*)",
|
||||||
|
"Bash(dotnet restore:*)",
|
||||||
|
"Bash(dotnet nuget:*)",
|
||||||
|
"Bash(csc -parse:*)",
|
||||||
|
"Bash(grep:*)",
|
||||||
|
"Bash(dotnet build:*)",
|
||||||
|
"Bash(cat:*)",
|
||||||
|
"Bash(copy:*)",
|
||||||
|
"Bash(dotnet test:*)",
|
||||||
|
"Bash(dir:*)",
|
||||||
|
"Bash(Select-Object -ExpandProperty FullName)",
|
||||||
|
"Bash(echo:*)",
|
||||||
|
"Bash(Out-File -FilePath \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Libraries\\StellaOps.Scanner.Surface\\StellaOps.Scanner.Surface.csproj\" -Encoding utf8)",
|
||||||
|
"Bash(wc:*)",
|
||||||
|
"Bash(find:*)",
|
||||||
|
"WebFetch(domain:docs.gradle.org)",
|
||||||
|
"WebSearch",
|
||||||
|
"Bash(dotnet msbuild:*)",
|
||||||
|
"Bash(test:*)",
|
||||||
|
"Bash(taskkill:*)",
|
||||||
|
"Bash(timeout /t)",
|
||||||
|
"Bash(dotnet clean:*)",
|
||||||
|
"Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
|
||||||
|
"Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")",
|
||||||
|
"Bash(rm:*)",
|
||||||
|
"Bash(if not exist \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\" mkdir \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\")",
|
||||||
|
"Bash(del \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\SPRINT_0510_0001_0001_airgap.md\")"
|
||||||
|
],
|
||||||
|
"deny": [],
|
||||||
|
"ask": []
|
||||||
|
},
|
||||||
|
"outputStyle": "default"
|
||||||
|
}
|
||||||
12
.config/dotnet-tools.json
Normal file
12
.config/dotnet-tools.json
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"version": 1,
|
||||||
|
"isRoot": true,
|
||||||
|
"tools": {
|
||||||
|
"dotnet-stryker": {
|
||||||
|
"version": "4.4.0",
|
||||||
|
"commands": [
|
||||||
|
"stryker"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
22
.dockerignore
Normal file
22
.dockerignore
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.gitea
|
||||||
|
.venv
|
||||||
|
bin
|
||||||
|
obj
|
||||||
|
**/bin
|
||||||
|
**/obj
|
||||||
|
.nuget
|
||||||
|
**/node_modules
|
||||||
|
**/dist
|
||||||
|
**/coverage
|
||||||
|
**/*.user
|
||||||
|
**/*.suo
|
||||||
|
**/*.cache
|
||||||
|
**/.vscode
|
||||||
|
**/.idea
|
||||||
|
**/.DS_Store
|
||||||
|
**/TestResults
|
||||||
|
**/out
|
||||||
|
**/packages
|
||||||
|
/tmp
|
||||||
5
.editorconfig
Normal file
5
.editorconfig
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
|
||||||
|
[src/Scanner/StellaOps.Scanner.Analyzers.Native/**.cs]
|
||||||
|
dotnet_diagnostic.CA2022.severity = none
|
||||||
|
[src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Native.Tests/**.cs]
|
||||||
|
dotnet_diagnostic.CA2022.severity = none
|
||||||
3
.gitattributes
vendored
3
.gitattributes
vendored
@@ -1,2 +1,5 @@
|
|||||||
# Ensure analyzer fixture assets keep LF endings for deterministic hashes
|
# Ensure analyzer fixture assets keep LF endings for deterministic hashes
|
||||||
src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/** text eol=lf
|
src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/** text eol=lf
|
||||||
|
|
||||||
|
# Ensure reachability sample assets keep LF endings for deterministic hashes
|
||||||
|
tests/reachability/samples-public/** text eol=lf
|
||||||
|
|||||||
22
.gitea/AGENTS.md
Normal file
22
.gitea/AGENTS.md
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
# .gitea AGENTS
|
||||||
|
|
||||||
|
## Purpose & Scope
|
||||||
|
- Working directory: `.gitea/` (CI workflows, templates, pipeline configs).
|
||||||
|
- Roles: DevOps engineer, QA automation.
|
||||||
|
|
||||||
|
## Required Reading (treat as read before DOING)
|
||||||
|
- `docs/README.md`
|
||||||
|
- `docs/modules/ci/architecture.md`
|
||||||
|
- `docs/modules/devops/architecture.md`
|
||||||
|
- Relevant sprint file(s).
|
||||||
|
|
||||||
|
## Working Agreements
|
||||||
|
- Keep workflows deterministic and offline-friendly.
|
||||||
|
- Pin versions for tooling where possible.
|
||||||
|
- Use UTC timestamps in comments/logs.
|
||||||
|
- Avoid adding external network calls unless the sprint explicitly requires them.
|
||||||
|
- Record workflow changes in the sprint Execution Log and Decisions & Risks.
|
||||||
|
|
||||||
|
## Validation
|
||||||
|
- Manually validate YAML structure and paths.
|
||||||
|
- Ensure workflow paths match repository layout.
|
||||||
43
.gitea/scripts/build/build-airgap-bundle.sh
Normal file
43
.gitea/scripts/build/build-airgap-bundle.sh
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# DEVOPS-CONTAINERS-46-001: build air-gap bundle from existing buildx OCI archive
|
||||||
|
|
||||||
|
if [[ $# -lt 1 ]]; then
|
||||||
|
echo "Usage: $0 <image-tag> [bundle-dir]" >&2
|
||||||
|
exit 64
|
||||||
|
fi
|
||||||
|
|
||||||
|
IMAGE_TAG=$1
|
||||||
|
BUNDLE_DIR=${2:-"out/bundles/$(echo "$IMAGE_TAG" | tr '/:' '__')"}
|
||||||
|
SRC_DIR="out/buildx/$(echo "$IMAGE_TAG" | tr '/:' '__')"
|
||||||
|
OCI_ARCHIVE="${SRC_DIR}/image.oci"
|
||||||
|
|
||||||
|
if [[ ! -f "$OCI_ARCHIVE" ]]; then
|
||||||
|
echo "[airgap] OCI archive not found at $OCI_ARCHIVE. Run build-multiarch first." >&2
|
||||||
|
exit 66
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p "$BUNDLE_DIR"
|
||||||
|
|
||||||
|
SBOM_FILE=""
|
||||||
|
if [[ -f "${SRC_DIR}/sbom.syft.json" ]]; then
|
||||||
|
SBOM_FILE="${SRC_DIR}/sbom.syft.json"
|
||||||
|
fi
|
||||||
|
|
||||||
|
cat > "${BUNDLE_DIR}/bundle-manifest.json" <<EOF
|
||||||
|
{
|
||||||
|
"image": "${IMAGE_TAG}",
|
||||||
|
"oci_archive": "image.oci",
|
||||||
|
"sbom": "$( [[ -n "$SBOM_FILE" ]] && echo sbom.syft.json || echo null )",
|
||||||
|
"created_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cp "$OCI_ARCHIVE" "${BUNDLE_DIR}/image.oci"
|
||||||
|
[[ -n "$SBOM_FILE" ]] && cp "$SBOM_FILE" "${BUNDLE_DIR}/sbom.syft.json"
|
||||||
|
[[ -f "${SRC_DIR}/image.sha256" ]] && cp "${SRC_DIR}/image.sha256" "${BUNDLE_DIR}/image.sha256"
|
||||||
|
[[ -f "${SRC_DIR}/image.sig" ]] && cp "${SRC_DIR}/image.sig" "${BUNDLE_DIR}/image.sig"
|
||||||
|
|
||||||
|
tar -C "$BUNDLE_DIR" -czf "${BUNDLE_DIR}.tgz" .
|
||||||
|
echo "[airgap] bundle created at ${BUNDLE_DIR}.tgz"
|
||||||
131
.gitea/scripts/build/build-cli.sh
Normal file
131
.gitea/scripts/build/build-cli.sh
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# DEVOPS-CLI-41-001: Build multi-platform CLI binaries with SBOM and checksums.
|
||||||
|
# Updated: SPRINT_5100_0001_0001 - CLI Consolidation: includes Aoc and Symbols plugins
|
||||||
|
|
||||||
|
RIDS="${RIDS:-linux-x64,win-x64,osx-arm64}"
|
||||||
|
CONFIG="${CONFIG:-Release}"
|
||||||
|
PROJECT="src/Cli/StellaOps.Cli/StellaOps.Cli.csproj"
|
||||||
|
OUT_ROOT="out/cli"
|
||||||
|
SBOM_TOOL="${SBOM_TOOL:-syft}" # syft|none
|
||||||
|
SIGN="${SIGN:-false}"
|
||||||
|
COSIGN_KEY="${COSIGN_KEY:-}"
|
||||||
|
|
||||||
|
# CLI Plugins to include in the distribution
|
||||||
|
# SPRINT_5100_0001_0001: CLI Consolidation - stella aoc and stella symbols
|
||||||
|
PLUGIN_PROJECTS=(
|
||||||
|
"src/Cli/__Libraries/StellaOps.Cli.Plugins.Aoc/StellaOps.Cli.Plugins.Aoc.csproj"
|
||||||
|
"src/Cli/__Libraries/StellaOps.Cli.Plugins.Symbols/StellaOps.Cli.Plugins.Symbols.csproj"
|
||||||
|
)
|
||||||
|
PLUGIN_MANIFESTS=(
|
||||||
|
"src/Cli/plugins/cli/StellaOps.Cli.Plugins.Aoc/stellaops.cli.plugins.aoc.manifest.json"
|
||||||
|
"src/Cli/plugins/cli/StellaOps.Cli.Plugins.Symbols/stellaops.cli.plugins.symbols.manifest.json"
|
||||||
|
)
|
||||||
|
|
||||||
|
IFS=',' read -ra TARGETS <<< "$RIDS"
|
||||||
|
|
||||||
|
mkdir -p "$OUT_ROOT"
|
||||||
|
|
||||||
|
if ! command -v dotnet >/dev/null 2>&1; then
|
||||||
|
echo "[cli-build] dotnet CLI not found" >&2
|
||||||
|
exit 69
|
||||||
|
fi
|
||||||
|
|
||||||
|
generate_sbom() {
|
||||||
|
local dir="$1"
|
||||||
|
local sbom="$2"
|
||||||
|
if [[ "$SBOM_TOOL" == "syft" ]] && command -v syft >/dev/null 2>&1; then
|
||||||
|
syft "dir:${dir}" -o json > "$sbom"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
sign_file() {
|
||||||
|
local file="$1"
|
||||||
|
if [[ "$SIGN" == "true" && -n "$COSIGN_KEY" && -x "$(command -v cosign || true)" ]]; then
|
||||||
|
COSIGN_EXPERIMENTAL=1 cosign sign-blob --key "$COSIGN_KEY" --output-signature "${file}.sig" "$file"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
for rid in "${TARGETS[@]}"; do
|
||||||
|
echo "[cli-build] publishing for $rid"
|
||||||
|
out_dir="${OUT_ROOT}/${rid}"
|
||||||
|
publish_dir="${out_dir}/publish"
|
||||||
|
plugins_dir="${publish_dir}/plugins/cli"
|
||||||
|
mkdir -p "$publish_dir"
|
||||||
|
mkdir -p "$plugins_dir"
|
||||||
|
|
||||||
|
# Build main CLI
|
||||||
|
dotnet publish "$PROJECT" -c "$CONFIG" -r "$rid" \
|
||||||
|
-o "$publish_dir" \
|
||||||
|
--self-contained true \
|
||||||
|
-p:PublishSingleFile=true \
|
||||||
|
-p:PublishTrimmed=false \
|
||||||
|
-p:DebugType=None \
|
||||||
|
>/dev/null
|
||||||
|
|
||||||
|
# Build and copy plugins
|
||||||
|
# SPRINT_5100_0001_0001: CLI Consolidation
|
||||||
|
for i in "${!PLUGIN_PROJECTS[@]}"; do
|
||||||
|
plugin_project="${PLUGIN_PROJECTS[$i]}"
|
||||||
|
manifest_path="${PLUGIN_MANIFESTS[$i]}"
|
||||||
|
|
||||||
|
if [[ ! -f "$plugin_project" ]]; then
|
||||||
|
echo "[cli-build] WARNING: Plugin project not found: $plugin_project"
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Get plugin name from project path
|
||||||
|
plugin_name=$(basename "$(dirname "$plugin_project")")
|
||||||
|
plugin_out="${plugins_dir}/${plugin_name}"
|
||||||
|
mkdir -p "$plugin_out"
|
||||||
|
|
||||||
|
echo "[cli-build] building plugin: $plugin_name"
|
||||||
|
dotnet publish "$plugin_project" -c "$CONFIG" -r "$rid" \
|
||||||
|
-o "$plugin_out" \
|
||||||
|
--self-contained false \
|
||||||
|
-p:DebugType=None \
|
||||||
|
>/dev/null 2>&1 || echo "[cli-build] WARNING: Plugin build failed for $plugin_name (may have pre-existing errors)"
|
||||||
|
|
||||||
|
# Copy manifest file
|
||||||
|
if [[ -f "$manifest_path" ]]; then
|
||||||
|
cp "$manifest_path" "$plugin_out/"
|
||||||
|
else
|
||||||
|
echo "[cli-build] WARNING: Manifest not found: $manifest_path"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Package
|
||||||
|
archive_ext="tar.gz"
|
||||||
|
archive_cmd=(tar -C "$publish_dir" -czf)
|
||||||
|
if [[ "$rid" == win-* ]]; then
|
||||||
|
archive_ext="zip"
|
||||||
|
archive_cmd=(zip -jr)
|
||||||
|
fi
|
||||||
|
|
||||||
|
archive_name="stella-cli-${rid}.${archive_ext}"
|
||||||
|
archive_path="${out_dir}/${archive_name}"
|
||||||
|
"${archive_cmd[@]}" "$archive_path" "$publish_dir"
|
||||||
|
|
||||||
|
sha256sum "$archive_path" > "${archive_path}.sha256"
|
||||||
|
sign_file "$archive_path"
|
||||||
|
|
||||||
|
# SBOM
|
||||||
|
generate_sbom "$publish_dir" "${archive_path}.sbom.json"
|
||||||
|
done
|
||||||
|
|
||||||
|
# Build manifest
|
||||||
|
manifest="${OUT_ROOT}/manifest.json"
|
||||||
|
plugin_list=$(printf '"%s",' "${PLUGIN_PROJECTS[@]}" | sed 's/,.*//' | sed 's/.*\///' | sed 's/\.csproj//')
|
||||||
|
cat > "$manifest" <<EOF
|
||||||
|
{
|
||||||
|
"generated_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
|
||||||
|
"config": "$CONFIG",
|
||||||
|
"rids": [$(printf '"%s",' "${TARGETS[@]}" | sed 's/,$//')],
|
||||||
|
"plugins": ["stellaops.cli.plugins.aoc", "stellaops.cli.plugins.symbols"],
|
||||||
|
"artifacts_root": "$OUT_ROOT",
|
||||||
|
"notes": "CLI Consolidation (SPRINT_5100_0001_0001) - includes aoc and symbols plugins"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "[cli-build] artifacts in $OUT_ROOT"
|
||||||
93
.gitea/scripts/build/build-multiarch.sh
Normal file
93
.gitea/scripts/build/build-multiarch.sh
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Multi-arch buildx helper for DEVOPS-CONTAINERS-44-001
|
||||||
|
# Requirements: docker CLI with buildx, optional syft (for SBOM) and cosign (for signing).
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
echo "Usage: $0 <image-tag> <context-dir> [--platform linux/amd64,linux/arm64] [--push] [--sbom syft|none] [--sign <cosign-key>]" >&2
|
||||||
|
exit 64
|
||||||
|
}
|
||||||
|
|
||||||
|
if [[ $# -lt 2 ]]; then
|
||||||
|
usage
|
||||||
|
fi
|
||||||
|
|
||||||
|
IMAGE_TAG=$1; shift
|
||||||
|
CONTEXT_DIR=$1; shift
|
||||||
|
|
||||||
|
PLATFORMS="linux/amd64,linux/arm64"
|
||||||
|
PUSH=false
|
||||||
|
SBOM_TOOL="syft"
|
||||||
|
COSIGN_KEY=""
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--platform) PLATFORMS="$2"; shift 2;;
|
||||||
|
--push) PUSH=true; shift;;
|
||||||
|
--sbom) SBOM_TOOL="$2"; shift 2;;
|
||||||
|
--sign) COSIGN_KEY="$2"; shift 2;;
|
||||||
|
*) echo "Unknown option: $1" >&2; usage;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
if ! command -v docker >/dev/null 2>&1; then
|
||||||
|
echo "[buildx] docker CLI not found" >&2
|
||||||
|
exit 69
|
||||||
|
fi
|
||||||
|
|
||||||
|
OUT_ROOT="out/buildx/$(echo "$IMAGE_TAG" | tr '/:' '__')"
|
||||||
|
mkdir -p "$OUT_ROOT"
|
||||||
|
|
||||||
|
BUILDER_NAME="stellaops-multiarch"
|
||||||
|
if ! docker buildx inspect "$BUILDER_NAME" >/dev/null 2>&1; then
|
||||||
|
docker buildx create --name "$BUILDER_NAME" --driver docker-container --use >/dev/null
|
||||||
|
else
|
||||||
|
docker buildx use "$BUILDER_NAME" >/dev/null
|
||||||
|
fi
|
||||||
|
|
||||||
|
BUILD_OPTS=(
|
||||||
|
--platform "$PLATFORMS"
|
||||||
|
-t "$IMAGE_TAG"
|
||||||
|
--provenance=false
|
||||||
|
--sbom=false
|
||||||
|
--output "type=oci,dest=${OUT_ROOT}/image.oci"
|
||||||
|
)
|
||||||
|
|
||||||
|
if $PUSH; then
|
||||||
|
BUILD_OPTS+=("--push")
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "[buildx] building $IMAGE_TAG for $PLATFORMS"
|
||||||
|
docker buildx build "${BUILD_OPTS[@]}" "$CONTEXT_DIR"
|
||||||
|
|
||||||
|
echo "[buildx] computing digest"
|
||||||
|
IMAGE_DIGEST=$(sha256sum "${OUT_ROOT}/image.oci" | awk '{print $1}')
|
||||||
|
echo "$IMAGE_DIGEST image.oci" > "${OUT_ROOT}/image.sha256"
|
||||||
|
|
||||||
|
if [[ "$SBOM_TOOL" == "syft" ]] && command -v syft >/dev/null 2>&1; then
|
||||||
|
echo "[buildx] generating SBOM via syft"
|
||||||
|
syft "oci-archive:${OUT_ROOT}/image.oci" -o json > "${OUT_ROOT}/sbom.syft.json"
|
||||||
|
else
|
||||||
|
echo "[buildx] skipping SBOM (tool=$SBOM_TOOL, syft available? $(command -v syft >/dev/null && echo yes || echo no))"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -n "$COSIGN_KEY" ]] && command -v cosign >/dev/null 2>&1; then
|
||||||
|
echo "[buildx] signing digest with cosign key"
|
||||||
|
COSIGN_EXPERIMENTAL=1 cosign sign-blob --key "$COSIGN_KEY" --output-signature "${OUT_ROOT}/image.sig" --output-certificate "${OUT_ROOT}/image.cert" "${OUT_ROOT}/image.oci"
|
||||||
|
else
|
||||||
|
echo "[buildx] signature skipped (no key provided or cosign missing)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
cat > "${OUT_ROOT}/build-metadata.json" <<EOF
|
||||||
|
{
|
||||||
|
"image": "${IMAGE_TAG}",
|
||||||
|
"platforms": "${PLATFORMS}",
|
||||||
|
"pushed": ${PUSH},
|
||||||
|
"digest_sha256": "${IMAGE_DIGEST}",
|
||||||
|
"generated_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
|
||||||
|
"sbom": "$( [[ -f ${OUT_ROOT}/sbom.syft.json ]] && echo sbom.syft.json || echo null )"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "[buildx] artifacts written to ${OUT_ROOT}"
|
||||||
43
.gitea/scripts/evidence/signals-upload-evidence.sh
Normal file
43
.gitea/scripts/evidence/signals-upload-evidence.sh
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
STAGED_DIR="evidence-locker/signals/2025-12-05"
|
||||||
|
MODULE_ROOT="docs/modules/signals"
|
||||||
|
TAR_OUT="/tmp/signals-evidence.tar"
|
||||||
|
|
||||||
|
if [[ -z "${EVIDENCE_LOCKER_URL:-}" || -z "${CI_EVIDENCE_LOCKER_TOKEN:-}" ]]; then
|
||||||
|
echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
tmpdir=$(mktemp -d)
|
||||||
|
trap 'rm -rf "$tmpdir"' EXIT
|
||||||
|
|
||||||
|
rsync -a --relative \
|
||||||
|
"$STAGED_DIR/SHA256SUMS" \
|
||||||
|
"$STAGED_DIR/confidence_decay_config.sigstore.json" \
|
||||||
|
"$STAGED_DIR/unknowns_scoring_manifest.sigstore.json" \
|
||||||
|
"$STAGED_DIR/heuristics_catalog.sigstore.json" \
|
||||||
|
"$MODULE_ROOT/decay/confidence_decay_config.yaml" \
|
||||||
|
"$MODULE_ROOT/unknowns/unknowns_scoring_manifest.json" \
|
||||||
|
"$MODULE_ROOT/heuristics/heuristics.catalog.json" \
|
||||||
|
"$tmpdir/"
|
||||||
|
|
||||||
|
pushd "$tmpdir/$STAGED_DIR" >/dev/null
|
||||||
|
sha256sum --check SHA256SUMS
|
||||||
|
popd >/dev/null
|
||||||
|
|
||||||
|
# Build deterministic tarball
|
||||||
|
pushd "$tmpdir" >/dev/null
|
||||||
|
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
|
||||||
|
-cf "$TAR_OUT" .
|
||||||
|
popd >/dev/null
|
||||||
|
|
||||||
|
sha256sum "$TAR_OUT"
|
||||||
|
|
||||||
|
curl --retry 3 --retry-delay 2 --fail \
|
||||||
|
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
|
||||||
|
-X PUT "$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar" \
|
||||||
|
--data-binary "@$TAR_OUT"
|
||||||
|
|
||||||
|
echo "Uploaded $TAR_OUT to $EVIDENCE_LOCKER_URL/signals/2025-12-05/"
|
||||||
46
.gitea/scripts/evidence/upload-all-evidence.sh
Normal file
46
.gitea/scripts/evidence/upload-all-evidence.sh
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Upload both Zastava and Signals evidence bundles to the locker.
|
||||||
|
# Requires EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN.
|
||||||
|
|
||||||
|
EVIDENCE_LOCKER_URL=${EVIDENCE_LOCKER_URL:-}
|
||||||
|
CI_EVIDENCE_LOCKER_TOKEN=${CI_EVIDENCE_LOCKER_TOKEN:-}
|
||||||
|
|
||||||
|
if [[ -z "$EVIDENCE_LOCKER_URL" || -z "$CI_EVIDENCE_LOCKER_TOKEN" ]]; then
|
||||||
|
echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Defaults
|
||||||
|
ZASTAVA_TAR=${ZASTAVA_TAR:-evidence-locker/zastava/2025-12-02/zastava-evidence.tar}
|
||||||
|
ZASTAVA_VERIFY=${ZASTAVA_VERIFY:-tools/zastava-verify-evidence-tar.sh}
|
||||||
|
ZASTAVA_PATH=\$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar
|
||||||
|
|
||||||
|
SIGNALS_TAR=${SIGNALS_TAR:-evidence-locker/signals/2025-12-05/signals-evidence.tar}
|
||||||
|
SIGNALS_VERIFY=${SIGNALS_VERIFY:-tools/signals-verify-evidence-tar.sh}
|
||||||
|
SIGNALS_PATH=\$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar
|
||||||
|
|
||||||
|
# Verify
|
||||||
|
if [[ -x "$ZASTAVA_VERIFY" ]]; then
|
||||||
|
"$ZASTAVA_VERIFY" "$ZASTAVA_TAR"
|
||||||
|
fi
|
||||||
|
if [[ -x "$SIGNALS_VERIFY" ]]; then
|
||||||
|
"$SIGNALS_VERIFY" "$SIGNALS_TAR"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Upload Zastava
|
||||||
|
curl --retry 3 --retry-delay 2 --fail \
|
||||||
|
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
|
||||||
|
-X PUT "$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar" \
|
||||||
|
--data-binary @"$ZASTAVA_TAR"
|
||||||
|
|
||||||
|
echo "Uploaded Zastava evidence to $EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar"
|
||||||
|
|
||||||
|
# Upload Signals
|
||||||
|
curl --retry 3 --retry-delay 2 --fail \
|
||||||
|
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
|
||||||
|
-X PUT "$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar" \
|
||||||
|
--data-binary @"$SIGNALS_TAR"
|
||||||
|
|
||||||
|
echo "Uploaded Signals evidence to $EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar"
|
||||||
48
.gitea/scripts/evidence/zastava-upload-evidence.sh
Normal file
48
.gitea/scripts/evidence/zastava-upload-evidence.sh
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
if [[ -z "${EVIDENCE_LOCKER_URL:-}" || -z "${CI_EVIDENCE_LOCKER_TOKEN:-}" ]]; then
|
||||||
|
echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
STAGED_DIR="evidence-locker/zastava/2025-12-02"
|
||||||
|
TAR_OUT="/tmp/zastava-evidence.tar"
|
||||||
|
MODULE_ROOT="docs/modules/zastava"
|
||||||
|
|
||||||
|
test -d "$MODULE_ROOT" || { echo "missing module root $MODULE_ROOT" >&2; exit 1; }
|
||||||
|
mkdir -p "$STAGED_DIR"
|
||||||
|
|
||||||
|
tmpdir=$(mktemp -d)
|
||||||
|
trap 'rm -rf "$tmpdir"' EXIT
|
||||||
|
|
||||||
|
rsync -a --relative \
|
||||||
|
"$MODULE_ROOT/SHA256SUMS" \
|
||||||
|
"$MODULE_ROOT/schemas/" \
|
||||||
|
"$MODULE_ROOT/exports/" \
|
||||||
|
"$MODULE_ROOT/thresholds.yaml" \
|
||||||
|
"$MODULE_ROOT/thresholds.yaml.dsse" \
|
||||||
|
"$MODULE_ROOT/kit/verify.sh" \
|
||||||
|
"$MODULE_ROOT/kit/README.md" \
|
||||||
|
"$MODULE_ROOT/kit/ed25519.pub" \
|
||||||
|
"$MODULE_ROOT/kit/zastava-kit.tzst" \
|
||||||
|
"$MODULE_ROOT/kit/zastava-kit.tzst.dsse" \
|
||||||
|
"$MODULE_ROOT/evidence/README.md" \
|
||||||
|
"$tmpdir/"
|
||||||
|
|
||||||
|
pushd "$tmpdir/docs/modules/zastava" >/dev/null
|
||||||
|
sha256sum --check SHA256SUMS
|
||||||
|
|
||||||
|
# Build deterministic tarball for reproducibility (payloads + DSSE)
|
||||||
|
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
|
||||||
|
-cf "$TAR_OUT" .
|
||||||
|
popd >/dev/null
|
||||||
|
|
||||||
|
sha256sum "$TAR_OUT"
|
||||||
|
|
||||||
|
curl --retry 3 --retry-delay 2 --fail \
|
||||||
|
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
|
||||||
|
-X PUT "$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar" \
|
||||||
|
--data-binary "@$TAR_OUT"
|
||||||
|
|
||||||
|
echo "Uploaded $TAR_OUT to $EVIDENCE_LOCKER_URL/zastava/2025-12-02/"
|
||||||
287
.gitea/scripts/metrics/compute-reachability-metrics.sh
Normal file
287
.gitea/scripts/metrics/compute-reachability-metrics.sh
Normal file
@@ -0,0 +1,287 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# =============================================================================
|
||||||
|
# compute-reachability-metrics.sh
|
||||||
|
# Computes reachability metrics against ground-truth corpus
|
||||||
|
#
|
||||||
|
# Usage: ./compute-reachability-metrics.sh [options]
|
||||||
|
# --corpus-path PATH Path to ground-truth corpus (default: src/__Tests/reachability/corpus)
|
||||||
|
# --output FILE Output JSON file (default: stdout)
|
||||||
|
# --dry-run Show what would be computed without running scanner
|
||||||
|
# --strict Exit non-zero if any threshold is violated
|
||||||
|
# --verbose Enable verbose output
|
||||||
|
#
|
||||||
|
# Output: JSON with recall, precision, accuracy metrics per vulnerability class
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||||
|
|
||||||
|
# Default paths
|
||||||
|
CORPUS_PATH="${REPO_ROOT}/src/__Tests/reachability/corpus"
|
||||||
|
OUTPUT_FILE=""
|
||||||
|
DRY_RUN=false
|
||||||
|
STRICT=false
|
||||||
|
VERBOSE=false
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--corpus-path)
|
||||||
|
CORPUS_PATH="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--output)
|
||||||
|
OUTPUT_FILE="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--dry-run)
|
||||||
|
DRY_RUN=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--strict)
|
||||||
|
STRICT=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--verbose)
|
||||||
|
VERBOSE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-h|--help)
|
||||||
|
head -20 "$0" | tail -15
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown option: $1" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
log() {
|
||||||
|
if [[ "${VERBOSE}" == "true" ]]; then
|
||||||
|
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
error() {
|
||||||
|
echo "[ERROR] $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
# Validate corpus exists
|
||||||
|
if [[ ! -d "${CORPUS_PATH}" ]]; then
|
||||||
|
error "Corpus directory not found: ${CORPUS_PATH}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
MANIFEST_FILE="${CORPUS_PATH}/manifest.json"
|
||||||
|
if [[ ! -f "${MANIFEST_FILE}" ]]; then
|
||||||
|
error "Corpus manifest not found: ${MANIFEST_FILE}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log "Loading corpus from ${CORPUS_PATH}"
|
||||||
|
log "Manifest: ${MANIFEST_FILE}"
|
||||||
|
|
||||||
|
# Initialize counters for each vulnerability class
|
||||||
|
declare -A true_positives
|
||||||
|
declare -A false_positives
|
||||||
|
declare -A false_negatives
|
||||||
|
declare -A total_expected
|
||||||
|
|
||||||
|
CLASSES=("runtime_dep" "os_pkg" "code" "config")
|
||||||
|
|
||||||
|
for class in "${CLASSES[@]}"; do
|
||||||
|
true_positives[$class]=0
|
||||||
|
false_positives[$class]=0
|
||||||
|
false_negatives[$class]=0
|
||||||
|
total_expected[$class]=0
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ "${DRY_RUN}" == "true" ]]; then
|
||||||
|
log "[DRY RUN] Would process corpus fixtures..."
|
||||||
|
|
||||||
|
# Generate mock metrics for dry-run
|
||||||
|
cat <<EOF
|
||||||
|
{
|
||||||
|
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
|
||||||
|
"corpus_path": "${CORPUS_PATH}",
|
||||||
|
"dry_run": true,
|
||||||
|
"metrics": {
|
||||||
|
"runtime_dep": {
|
||||||
|
"recall": 0.96,
|
||||||
|
"precision": 0.94,
|
||||||
|
"f1_score": 0.95,
|
||||||
|
"total_expected": 100,
|
||||||
|
"true_positives": 96,
|
||||||
|
"false_positives": 6,
|
||||||
|
"false_negatives": 4
|
||||||
|
},
|
||||||
|
"os_pkg": {
|
||||||
|
"recall": 0.98,
|
||||||
|
"precision": 0.97,
|
||||||
|
"f1_score": 0.975,
|
||||||
|
"total_expected": 50,
|
||||||
|
"true_positives": 49,
|
||||||
|
"false_positives": 2,
|
||||||
|
"false_negatives": 1
|
||||||
|
},
|
||||||
|
"code": {
|
||||||
|
"recall": 0.92,
|
||||||
|
"precision": 0.90,
|
||||||
|
"f1_score": 0.91,
|
||||||
|
"total_expected": 25,
|
||||||
|
"true_positives": 23,
|
||||||
|
"false_positives": 3,
|
||||||
|
"false_negatives": 2
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"recall": 0.88,
|
||||||
|
"precision": 0.85,
|
||||||
|
"f1_score": 0.865,
|
||||||
|
"total_expected": 20,
|
||||||
|
"true_positives": 18,
|
||||||
|
"false_positives": 3,
|
||||||
|
"false_negatives": 2
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"aggregate": {
|
||||||
|
"overall_recall": 0.9538,
|
||||||
|
"overall_precision": 0.9302,
|
||||||
|
"reachability_accuracy": 0.9268
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Process each fixture in the corpus
|
||||||
|
log "Processing corpus fixtures..."
|
||||||
|
|
||||||
|
# Read manifest and iterate fixtures
|
||||||
|
FIXTURE_COUNT=$(jq -r '.fixtures | length' "${MANIFEST_FILE}")
|
||||||
|
log "Found ${FIXTURE_COUNT} fixtures"
|
||||||
|
|
||||||
|
for i in $(seq 0 $((FIXTURE_COUNT - 1))); do
|
||||||
|
FIXTURE_ID=$(jq -r ".fixtures[$i].id" "${MANIFEST_FILE}")
|
||||||
|
FIXTURE_PATH="${CORPUS_PATH}/$(jq -r ".fixtures[$i].path" "${MANIFEST_FILE}")"
|
||||||
|
FIXTURE_CLASS=$(jq -r ".fixtures[$i].class" "${MANIFEST_FILE}")
|
||||||
|
EXPECTED_REACHABLE=$(jq -r ".fixtures[$i].expected_reachable // 0" "${MANIFEST_FILE}")
|
||||||
|
EXPECTED_UNREACHABLE=$(jq -r ".fixtures[$i].expected_unreachable // 0" "${MANIFEST_FILE}")
|
||||||
|
|
||||||
|
log "Processing fixture: ${FIXTURE_ID} (class: ${FIXTURE_CLASS})"
|
||||||
|
|
||||||
|
if [[ ! -d "${FIXTURE_PATH}" ]] && [[ ! -f "${FIXTURE_PATH}" ]]; then
|
||||||
|
error "Fixture not found: ${FIXTURE_PATH}"
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Update expected counts
|
||||||
|
total_expected[$FIXTURE_CLASS]=$((${total_expected[$FIXTURE_CLASS]} + EXPECTED_REACHABLE))
|
||||||
|
|
||||||
|
# Run scanner on fixture (deterministic mode, offline)
|
||||||
|
SCAN_RESULT_FILE=$(mktemp)
|
||||||
|
trap "rm -f ${SCAN_RESULT_FILE}" EXIT
|
||||||
|
|
||||||
|
if dotnet run --project "${REPO_ROOT}/src/Scanner/StellaOps.Scanner.Cli" -- \
|
||||||
|
scan --input "${FIXTURE_PATH}" \
|
||||||
|
--output "${SCAN_RESULT_FILE}" \
|
||||||
|
--deterministic \
|
||||||
|
--offline \
|
||||||
|
--format json \
|
||||||
|
2>/dev/null; then
|
||||||
|
|
||||||
|
# Parse scanner results
|
||||||
|
DETECTED_REACHABLE=$(jq -r '[.findings[] | select(.reachable == true)] | length' "${SCAN_RESULT_FILE}" 2>/dev/null || echo "0")
|
||||||
|
DETECTED_UNREACHABLE=$(jq -r '[.findings[] | select(.reachable == false)] | length' "${SCAN_RESULT_FILE}" 2>/dev/null || echo "0")
|
||||||
|
|
||||||
|
# Calculate TP, FP, FN for this fixture
|
||||||
|
TP=$((DETECTED_REACHABLE < EXPECTED_REACHABLE ? DETECTED_REACHABLE : EXPECTED_REACHABLE))
|
||||||
|
FP=$((DETECTED_REACHABLE > EXPECTED_REACHABLE ? DETECTED_REACHABLE - EXPECTED_REACHABLE : 0))
|
||||||
|
FN=$((EXPECTED_REACHABLE - TP))
|
||||||
|
|
||||||
|
true_positives[$FIXTURE_CLASS]=$((${true_positives[$FIXTURE_CLASS]} + TP))
|
||||||
|
false_positives[$FIXTURE_CLASS]=$((${false_positives[$FIXTURE_CLASS]} + FP))
|
||||||
|
false_negatives[$FIXTURE_CLASS]=$((${false_negatives[$FIXTURE_CLASS]} + FN))
|
||||||
|
else
|
||||||
|
error "Scanner failed for fixture: ${FIXTURE_ID}"
|
||||||
|
false_negatives[$FIXTURE_CLASS]=$((${false_negatives[$FIXTURE_CLASS]} + EXPECTED_REACHABLE))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Calculate metrics per class
|
||||||
|
calculate_metrics() {
|
||||||
|
local class=$1
|
||||||
|
local tp=${true_positives[$class]}
|
||||||
|
local fp=${false_positives[$class]}
|
||||||
|
local fn=${false_negatives[$class]}
|
||||||
|
local total=${total_expected[$class]}
|
||||||
|
|
||||||
|
local recall=0
|
||||||
|
local precision=0
|
||||||
|
local f1=0
|
||||||
|
|
||||||
|
if [[ $((tp + fn)) -gt 0 ]]; then
|
||||||
|
recall=$(echo "scale=4; $tp / ($tp + $fn)" | bc)
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ $((tp + fp)) -gt 0 ]]; then
|
||||||
|
precision=$(echo "scale=4; $tp / ($tp + $fp)" | bc)
|
||||||
|
fi
|
||||||
|
|
||||||
|
if (( $(echo "$recall + $precision > 0" | bc -l) )); then
|
||||||
|
f1=$(echo "scale=4; 2 * $recall * $precision / ($recall + $precision)" | bc)
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "{\"recall\": $recall, \"precision\": $precision, \"f1_score\": $f1, \"total_expected\": $total, \"true_positives\": $tp, \"false_positives\": $fp, \"false_negatives\": $fn}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Generate output JSON
|
||||||
|
OUTPUT=$(cat <<EOF
|
||||||
|
{
|
||||||
|
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
|
||||||
|
"corpus_path": "${CORPUS_PATH}",
|
||||||
|
"dry_run": false,
|
||||||
|
"metrics": {
|
||||||
|
"runtime_dep": $(calculate_metrics "runtime_dep"),
|
||||||
|
"os_pkg": $(calculate_metrics "os_pkg"),
|
||||||
|
"code": $(calculate_metrics "code"),
|
||||||
|
"config": $(calculate_metrics "config")
|
||||||
|
},
|
||||||
|
"aggregate": {
|
||||||
|
"overall_recall": $(echo "scale=4; (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]}) / (${total_expected[runtime_dep]} + ${total_expected[os_pkg]} + ${total_expected[code]} + ${total_expected[config]} + 0.0001)" | bc),
|
||||||
|
"overall_precision": $(echo "scale=4; (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]}) / (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]} + ${false_positives[runtime_dep]} + ${false_positives[os_pkg]} + ${false_positives[code]} + ${false_positives[config]} + 0.0001)" | bc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
# Output results
|
||||||
|
if [[ -n "${OUTPUT_FILE}" ]]; then
|
||||||
|
echo "${OUTPUT}" > "${OUTPUT_FILE}"
|
||||||
|
log "Results written to ${OUTPUT_FILE}"
|
||||||
|
else
|
||||||
|
echo "${OUTPUT}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check thresholds in strict mode
|
||||||
|
if [[ "${STRICT}" == "true" ]]; then
|
||||||
|
THRESHOLDS_FILE="${SCRIPT_DIR}/reachability-thresholds.yaml"
|
||||||
|
if [[ -f "${THRESHOLDS_FILE}" ]]; then
|
||||||
|
log "Checking thresholds from ${THRESHOLDS_FILE}"
|
||||||
|
|
||||||
|
# Extract thresholds and check
|
||||||
|
MIN_RECALL=$(yq -r '.thresholds.runtime_dependency_recall.min // 0.95' "${THRESHOLDS_FILE}")
|
||||||
|
ACTUAL_RECALL=$(echo "${OUTPUT}" | jq -r '.metrics.runtime_dep.recall')
|
||||||
|
|
||||||
|
if (( $(echo "$ACTUAL_RECALL < $MIN_RECALL" | bc -l) )); then
|
||||||
|
error "Runtime dependency recall ${ACTUAL_RECALL} below threshold ${MIN_RECALL}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log "All thresholds passed"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit 0
|
||||||
313
.gitea/scripts/metrics/compute-ttfs-metrics.sh
Normal file
313
.gitea/scripts/metrics/compute-ttfs-metrics.sh
Normal file
@@ -0,0 +1,313 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# =============================================================================
|
||||||
|
# compute-ttfs-metrics.sh
|
||||||
|
# Computes Time-to-First-Signal (TTFS) metrics from test runs
|
||||||
|
#
|
||||||
|
# Usage: ./compute-ttfs-metrics.sh [options]
|
||||||
|
# --results-path PATH Path to test results directory
|
||||||
|
# --output FILE Output JSON file (default: stdout)
|
||||||
|
# --baseline FILE Baseline TTFS file for comparison
|
||||||
|
# --dry-run Show what would be computed
|
||||||
|
# --strict Exit non-zero if thresholds are violated
|
||||||
|
# --verbose Enable verbose output
|
||||||
|
#
|
||||||
|
# Output: JSON with TTFS p50, p95, p99 metrics and regression status
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||||
|
|
||||||
|
# Default paths
|
||||||
|
RESULTS_PATH="${REPO_ROOT}/src/__Tests/__Benchmarks/results"
|
||||||
|
OUTPUT_FILE=""
|
||||||
|
BASELINE_FILE="${REPO_ROOT}/src/__Tests/__Benchmarks/baselines/ttfs-baseline.json"
|
||||||
|
DRY_RUN=false
|
||||||
|
STRICT=false
|
||||||
|
VERBOSE=false
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--results-path)
|
||||||
|
RESULTS_PATH="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--output)
|
||||||
|
OUTPUT_FILE="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--baseline)
|
||||||
|
BASELINE_FILE="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--dry-run)
|
||||||
|
DRY_RUN=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--strict)
|
||||||
|
STRICT=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--verbose)
|
||||||
|
VERBOSE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-h|--help)
|
||||||
|
head -20 "$0" | tail -15
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown option: $1" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
log() {
|
||||||
|
if [[ "${VERBOSE}" == "true" ]]; then
|
||||||
|
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
error() {
|
||||||
|
echo "[ERROR] $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
warn() {
|
||||||
|
echo "[WARN] $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
# Calculate percentiles from sorted array
|
||||||
|
percentile() {
|
||||||
|
local -n arr=$1
|
||||||
|
local p=$2
|
||||||
|
local n=${#arr[@]}
|
||||||
|
|
||||||
|
if [[ $n -eq 0 ]]; then
|
||||||
|
echo "0"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
local idx=$(echo "scale=0; ($n - 1) * $p / 100" | bc)
|
||||||
|
echo "${arr[$idx]}"
|
||||||
|
}
|
||||||
|
|
||||||
|
if [[ "${DRY_RUN}" == "true" ]]; then
|
||||||
|
log "[DRY RUN] Would process TTFS metrics..."
|
||||||
|
|
||||||
|
cat <<EOF
|
||||||
|
{
|
||||||
|
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
|
||||||
|
"dry_run": true,
|
||||||
|
"results_path": "${RESULTS_PATH}",
|
||||||
|
"metrics": {
|
||||||
|
"ttfs_ms": {
|
||||||
|
"p50": 1250,
|
||||||
|
"p95": 3500,
|
||||||
|
"p99": 5200,
|
||||||
|
"min": 450,
|
||||||
|
"max": 8500,
|
||||||
|
"mean": 1850,
|
||||||
|
"sample_count": 100
|
||||||
|
},
|
||||||
|
"by_scan_type": {
|
||||||
|
"image_scan": {
|
||||||
|
"p50": 2100,
|
||||||
|
"p95": 4500,
|
||||||
|
"p99": 6800
|
||||||
|
},
|
||||||
|
"filesystem_scan": {
|
||||||
|
"p50": 850,
|
||||||
|
"p95": 1800,
|
||||||
|
"p99": 2500
|
||||||
|
},
|
||||||
|
"sbom_scan": {
|
||||||
|
"p50": 320,
|
||||||
|
"p95": 650,
|
||||||
|
"p99": 950
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"baseline_comparison": {
|
||||||
|
"baseline_path": "${BASELINE_FILE}",
|
||||||
|
"p50_regression_pct": -2.5,
|
||||||
|
"p95_regression_pct": 1.2,
|
||||||
|
"regression_detected": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate results directory
|
||||||
|
if [[ ! -d "${RESULTS_PATH}" ]]; then
|
||||||
|
error "Results directory not found: ${RESULTS_PATH}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log "Processing TTFS results from ${RESULTS_PATH}"
|
||||||
|
|
||||||
|
# Collect all TTFS values from result files
|
||||||
|
declare -a ttfs_values=()
|
||||||
|
declare -a image_ttfs=()
|
||||||
|
declare -a fs_ttfs=()
|
||||||
|
declare -a sbom_ttfs=()
|
||||||
|
|
||||||
|
# Find and process all result files
|
||||||
|
for result_file in "${RESULTS_PATH}"/*.json "${RESULTS_PATH}"/**/*.json; do
|
||||||
|
[[ -f "${result_file}" ]] || continue
|
||||||
|
|
||||||
|
log "Processing: ${result_file}"
|
||||||
|
|
||||||
|
# Extract TTFS value if present
|
||||||
|
TTFS=$(jq -r '.ttfs_ms // .time_to_first_signal_ms // empty' "${result_file}" 2>/dev/null || true)
|
||||||
|
SCAN_TYPE=$(jq -r '.scan_type // "unknown"' "${result_file}" 2>/dev/null || echo "unknown")
|
||||||
|
|
||||||
|
if [[ -n "${TTFS}" ]] && [[ "${TTFS}" != "null" ]]; then
|
||||||
|
ttfs_values+=("${TTFS}")
|
||||||
|
|
||||||
|
case "${SCAN_TYPE}" in
|
||||||
|
image|image_scan|container)
|
||||||
|
image_ttfs+=("${TTFS}")
|
||||||
|
;;
|
||||||
|
filesystem|fs|fs_scan)
|
||||||
|
fs_ttfs+=("${TTFS}")
|
||||||
|
;;
|
||||||
|
sbom|sbom_scan)
|
||||||
|
sbom_ttfs+=("${TTFS}")
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Sort arrays for percentile calculation
|
||||||
|
IFS=$'\n' ttfs_sorted=($(sort -n <<<"${ttfs_values[*]}")); unset IFS
|
||||||
|
IFS=$'\n' image_sorted=($(sort -n <<<"${image_ttfs[*]}")); unset IFS
|
||||||
|
IFS=$'\n' fs_sorted=($(sort -n <<<"${fs_ttfs[*]}")); unset IFS
|
||||||
|
IFS=$'\n' sbom_sorted=($(sort -n <<<"${sbom_ttfs[*]}")); unset IFS
|
||||||
|
|
||||||
|
# Calculate overall metrics
|
||||||
|
SAMPLE_COUNT=${#ttfs_values[@]}
|
||||||
|
if [[ $SAMPLE_COUNT -eq 0 ]]; then
|
||||||
|
warn "No TTFS samples found"
|
||||||
|
P50=0
|
||||||
|
P95=0
|
||||||
|
P99=0
|
||||||
|
MIN=0
|
||||||
|
MAX=0
|
||||||
|
MEAN=0
|
||||||
|
else
|
||||||
|
P50=$(percentile ttfs_sorted 50)
|
||||||
|
P95=$(percentile ttfs_sorted 95)
|
||||||
|
P99=$(percentile ttfs_sorted 99)
|
||||||
|
MIN=${ttfs_sorted[0]}
|
||||||
|
MAX=${ttfs_sorted[-1]}
|
||||||
|
|
||||||
|
# Calculate mean
|
||||||
|
SUM=0
|
||||||
|
for v in "${ttfs_values[@]}"; do
|
||||||
|
SUM=$((SUM + v))
|
||||||
|
done
|
||||||
|
MEAN=$((SUM / SAMPLE_COUNT))
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Calculate per-type metrics
|
||||||
|
IMAGE_P50=$(percentile image_sorted 50)
|
||||||
|
IMAGE_P95=$(percentile image_sorted 95)
|
||||||
|
IMAGE_P99=$(percentile image_sorted 99)
|
||||||
|
|
||||||
|
FS_P50=$(percentile fs_sorted 50)
|
||||||
|
FS_P95=$(percentile fs_sorted 95)
|
||||||
|
FS_P99=$(percentile fs_sorted 99)
|
||||||
|
|
||||||
|
SBOM_P50=$(percentile sbom_sorted 50)
|
||||||
|
SBOM_P95=$(percentile sbom_sorted 95)
|
||||||
|
SBOM_P99=$(percentile sbom_sorted 99)
|
||||||
|
|
||||||
|
# Compare against baseline if available
|
||||||
|
REGRESSION_DETECTED=false
|
||||||
|
P50_REGRESSION_PCT=0
|
||||||
|
P95_REGRESSION_PCT=0
|
||||||
|
|
||||||
|
if [[ -f "${BASELINE_FILE}" ]]; then
|
||||||
|
log "Comparing against baseline: ${BASELINE_FILE}"
|
||||||
|
|
||||||
|
BASELINE_P50=$(jq -r '.metrics.ttfs_ms.p50 // 0' "${BASELINE_FILE}")
|
||||||
|
BASELINE_P95=$(jq -r '.metrics.ttfs_ms.p95 // 0' "${BASELINE_FILE}")
|
||||||
|
|
||||||
|
if [[ $BASELINE_P50 -gt 0 ]]; then
|
||||||
|
P50_REGRESSION_PCT=$(echo "scale=2; (${P50} - ${BASELINE_P50}) * 100 / ${BASELINE_P50}" | bc)
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ $BASELINE_P95 -gt 0 ]]; then
|
||||||
|
P95_REGRESSION_PCT=$(echo "scale=2; (${P95} - ${BASELINE_P95}) * 100 / ${BASELINE_P95}" | bc)
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for regression (>10% increase)
|
||||||
|
if (( $(echo "${P50_REGRESSION_PCT} > 10" | bc -l) )) || (( $(echo "${P95_REGRESSION_PCT} > 10" | bc -l) )); then
|
||||||
|
REGRESSION_DETECTED=true
|
||||||
|
warn "TTFS regression detected: p50=${P50_REGRESSION_PCT}%, p95=${P95_REGRESSION_PCT}%"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Generate output
|
||||||
|
OUTPUT=$(cat <<EOF
|
||||||
|
{
|
||||||
|
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
|
||||||
|
"dry_run": false,
|
||||||
|
"results_path": "${RESULTS_PATH}",
|
||||||
|
"metrics": {
|
||||||
|
"ttfs_ms": {
|
||||||
|
"p50": ${P50},
|
||||||
|
"p95": ${P95},
|
||||||
|
"p99": ${P99},
|
||||||
|
"min": ${MIN},
|
||||||
|
"max": ${MAX},
|
||||||
|
"mean": ${MEAN},
|
||||||
|
"sample_count": ${SAMPLE_COUNT}
|
||||||
|
},
|
||||||
|
"by_scan_type": {
|
||||||
|
"image_scan": {
|
||||||
|
"p50": ${IMAGE_P50:-0},
|
||||||
|
"p95": ${IMAGE_P95:-0},
|
||||||
|
"p99": ${IMAGE_P99:-0}
|
||||||
|
},
|
||||||
|
"filesystem_scan": {
|
||||||
|
"p50": ${FS_P50:-0},
|
||||||
|
"p95": ${FS_P95:-0},
|
||||||
|
"p99": ${FS_P99:-0}
|
||||||
|
},
|
||||||
|
"sbom_scan": {
|
||||||
|
"p50": ${SBOM_P50:-0},
|
||||||
|
"p95": ${SBOM_P95:-0},
|
||||||
|
"p99": ${SBOM_P99:-0}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"baseline_comparison": {
|
||||||
|
"baseline_path": "${BASELINE_FILE}",
|
||||||
|
"p50_regression_pct": ${P50_REGRESSION_PCT},
|
||||||
|
"p95_regression_pct": ${P95_REGRESSION_PCT},
|
||||||
|
"regression_detected": ${REGRESSION_DETECTED}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
# Output results
|
||||||
|
if [[ -n "${OUTPUT_FILE}" ]]; then
|
||||||
|
echo "${OUTPUT}" > "${OUTPUT_FILE}"
|
||||||
|
log "Results written to ${OUTPUT_FILE}"
|
||||||
|
else
|
||||||
|
echo "${OUTPUT}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Strict mode: fail on regression
|
||||||
|
if [[ "${STRICT}" == "true" ]] && [[ "${REGRESSION_DETECTED}" == "true" ]]; then
|
||||||
|
error "TTFS regression exceeds threshold"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit 0
|
||||||
326
.gitea/scripts/metrics/enforce-performance-slos.sh
Normal file
326
.gitea/scripts/metrics/enforce-performance-slos.sh
Normal file
@@ -0,0 +1,326 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# =============================================================================
|
||||||
|
# enforce-performance-slos.sh
|
||||||
|
# Enforces scan time and compute budget SLOs in CI
|
||||||
|
#
|
||||||
|
# Usage: ./enforce-performance-slos.sh [options]
|
||||||
|
# --results-path PATH Path to benchmark results directory
|
||||||
|
# --slos-file FILE Path to SLO definitions (default: scripts/ci/performance-slos.yaml)
|
||||||
|
# --output FILE Output JSON file (default: stdout)
|
||||||
|
# --dry-run Show what would be enforced
|
||||||
|
# --strict Exit non-zero if any SLO is violated
|
||||||
|
# --verbose Enable verbose output
|
||||||
|
#
|
||||||
|
# Output: JSON with SLO evaluation results and violations
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||||
|
|
||||||
|
# Default paths
|
||||||
|
RESULTS_PATH="${REPO_ROOT}/src/__Tests/__Benchmarks/results"
|
||||||
|
SLOS_FILE="${SCRIPT_DIR}/performance-slos.yaml"
|
||||||
|
OUTPUT_FILE=""
|
||||||
|
DRY_RUN=false
|
||||||
|
STRICT=false
|
||||||
|
VERBOSE=false
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--results-path)
|
||||||
|
RESULTS_PATH="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--slos-file)
|
||||||
|
SLOS_FILE="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--output)
|
||||||
|
OUTPUT_FILE="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--dry-run)
|
||||||
|
DRY_RUN=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--strict)
|
||||||
|
STRICT=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--verbose)
|
||||||
|
VERBOSE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-h|--help)
|
||||||
|
head -20 "$0" | tail -15
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown option: $1" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
log() {
|
||||||
|
if [[ "${VERBOSE}" == "true" ]]; then
|
||||||
|
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
error() {
|
||||||
|
echo "[ERROR] $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
warn() {
|
||||||
|
echo "[WARN] $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
if [[ "${DRY_RUN}" == "true" ]]; then
|
||||||
|
log "[DRY RUN] Would enforce performance SLOs..."
|
||||||
|
|
||||||
|
cat <<EOF
|
||||||
|
{
|
||||||
|
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
|
||||||
|
"dry_run": true,
|
||||||
|
"results_path": "${RESULTS_PATH}",
|
||||||
|
"slos_file": "${SLOS_FILE}",
|
||||||
|
"slo_evaluations": {
|
||||||
|
"scan_time_p95": {
|
||||||
|
"slo_name": "Scan Time P95",
|
||||||
|
"threshold_ms": 30000,
|
||||||
|
"actual_ms": 25000,
|
||||||
|
"passed": true,
|
||||||
|
"margin_pct": 16.7
|
||||||
|
},
|
||||||
|
"memory_peak_mb": {
|
||||||
|
"slo_name": "Peak Memory Usage",
|
||||||
|
"threshold_mb": 2048,
|
||||||
|
"actual_mb": 1650,
|
||||||
|
"passed": true,
|
||||||
|
"margin_pct": 19.4
|
||||||
|
},
|
||||||
|
"cpu_time_seconds": {
|
||||||
|
"slo_name": "CPU Time",
|
||||||
|
"threshold_seconds": 60,
|
||||||
|
"actual_seconds": 45,
|
||||||
|
"passed": true,
|
||||||
|
"margin_pct": 25.0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"summary": {
|
||||||
|
"total_slos": 3,
|
||||||
|
"passed": 3,
|
||||||
|
"failed": 0,
|
||||||
|
"all_passed": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate paths
|
||||||
|
if [[ ! -d "${RESULTS_PATH}" ]]; then
|
||||||
|
error "Results directory not found: ${RESULTS_PATH}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -f "${SLOS_FILE}" ]]; then
|
||||||
|
warn "SLOs file not found: ${SLOS_FILE}, using defaults"
|
||||||
|
fi
|
||||||
|
|
||||||
|
log "Enforcing SLOs from ${SLOS_FILE}"
|
||||||
|
log "Results path: ${RESULTS_PATH}"
|
||||||
|
|
||||||
|
# Initialize evaluation results
|
||||||
|
declare -A slo_results
|
||||||
|
VIOLATIONS=()
|
||||||
|
TOTAL_SLOS=0
|
||||||
|
PASSED_SLOS=0
|
||||||
|
|
||||||
|
# Define default SLOs
|
||||||
|
declare -A SLOS
|
||||||
|
SLOS["scan_time_p95_ms"]=30000
|
||||||
|
SLOS["scan_time_p99_ms"]=60000
|
||||||
|
SLOS["memory_peak_mb"]=2048
|
||||||
|
SLOS["cpu_time_seconds"]=120
|
||||||
|
SLOS["sbom_gen_time_ms"]=10000
|
||||||
|
SLOS["policy_eval_time_ms"]=5000
|
||||||
|
|
||||||
|
# Load SLOs from file if exists
|
||||||
|
if [[ -f "${SLOS_FILE}" ]]; then
|
||||||
|
while IFS=: read -r key value; do
|
||||||
|
key=$(echo "$key" | tr -d ' ')
|
||||||
|
value=$(echo "$value" | tr -d ' ')
|
||||||
|
if [[ -n "$key" ]] && [[ -n "$value" ]] && [[ "$key" != "#"* ]]; then
|
||||||
|
SLOS["$key"]=$value
|
||||||
|
log "Loaded SLO: ${key}=${value}"
|
||||||
|
fi
|
||||||
|
done < <(yq -r 'to_entries | .[] | "\(.key):\(.value.threshold // .value)"' "${SLOS_FILE}" 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Collect metrics from results
|
||||||
|
SCAN_TIMES=()
|
||||||
|
MEMORY_VALUES=()
|
||||||
|
CPU_TIMES=()
|
||||||
|
SBOM_TIMES=()
|
||||||
|
POLICY_TIMES=()
|
||||||
|
|
||||||
|
for result_file in "${RESULTS_PATH}"/*.json "${RESULTS_PATH}"/**/*.json; do
|
||||||
|
[[ -f "${result_file}" ]] || continue
|
||||||
|
|
||||||
|
log "Processing: ${result_file}"
|
||||||
|
|
||||||
|
# Extract metrics
|
||||||
|
SCAN_TIME=$(jq -r '.duration_ms // .scan_time_ms // empty' "${result_file}" 2>/dev/null || true)
|
||||||
|
MEMORY=$(jq -r '.peak_memory_mb // .memory_mb // empty' "${result_file}" 2>/dev/null || true)
|
||||||
|
CPU_TIME=$(jq -r '.cpu_time_seconds // .cpu_seconds // empty' "${result_file}" 2>/dev/null || true)
|
||||||
|
SBOM_TIME=$(jq -r '.sbom_generation_ms // empty' "${result_file}" 2>/dev/null || true)
|
||||||
|
POLICY_TIME=$(jq -r '.policy_evaluation_ms // empty' "${result_file}" 2>/dev/null || true)
|
||||||
|
|
||||||
|
[[ -n "${SCAN_TIME}" ]] && SCAN_TIMES+=("${SCAN_TIME}")
|
||||||
|
[[ -n "${MEMORY}" ]] && MEMORY_VALUES+=("${MEMORY}")
|
||||||
|
[[ -n "${CPU_TIME}" ]] && CPU_TIMES+=("${CPU_TIME}")
|
||||||
|
[[ -n "${SBOM_TIME}" ]] && SBOM_TIMES+=("${SBOM_TIME}")
|
||||||
|
[[ -n "${POLICY_TIME}" ]] && POLICY_TIMES+=("${POLICY_TIME}")
|
||||||
|
done
|
||||||
|
|
||||||
|
# Helper: calculate percentile from array
|
||||||
|
calc_percentile() {
|
||||||
|
local -n values=$1
|
||||||
|
local pct=$2
|
||||||
|
|
||||||
|
if [[ ${#values[@]} -eq 0 ]]; then
|
||||||
|
echo "0"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
IFS=$'\n' sorted=($(sort -n <<<"${values[*]}")); unset IFS
|
||||||
|
local n=${#sorted[@]}
|
||||||
|
local idx=$(echo "scale=0; ($n - 1) * $pct / 100" | bc)
|
||||||
|
echo "${sorted[$idx]}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Helper: calculate max from array
|
||||||
|
calc_max() {
|
||||||
|
local -n values=$1
|
||||||
|
|
||||||
|
if [[ ${#values[@]} -eq 0 ]]; then
|
||||||
|
echo "0"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
local max=0
|
||||||
|
for v in "${values[@]}"; do
|
||||||
|
if (( $(echo "$v > $max" | bc -l) )); then
|
||||||
|
max=$v
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
echo "$max"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Evaluate each SLO
|
||||||
|
evaluate_slo() {
|
||||||
|
local name=$1
|
||||||
|
local threshold=$2
|
||||||
|
local actual=$3
|
||||||
|
local unit=$4
|
||||||
|
|
||||||
|
((TOTAL_SLOS++))
|
||||||
|
|
||||||
|
local passed=true
|
||||||
|
local margin_pct=0
|
||||||
|
|
||||||
|
if (( $(echo "$actual > $threshold" | bc -l) )); then
|
||||||
|
passed=false
|
||||||
|
margin_pct=$(echo "scale=2; ($actual - $threshold) * 100 / $threshold" | bc)
|
||||||
|
VIOLATIONS+=("${name}: ${actual}${unit} exceeds threshold ${threshold}${unit} (+${margin_pct}%)")
|
||||||
|
warn "SLO VIOLATION: ${name} = ${actual}${unit} (threshold: ${threshold}${unit})"
|
||||||
|
else
|
||||||
|
((PASSED_SLOS++))
|
||||||
|
margin_pct=$(echo "scale=2; ($threshold - $actual) * 100 / $threshold" | bc)
|
||||||
|
log "SLO PASSED: ${name} = ${actual}${unit} (threshold: ${threshold}${unit}, margin: ${margin_pct}%)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "{\"slo_name\": \"${name}\", \"threshold\": ${threshold}, \"actual\": ${actual}, \"unit\": \"${unit}\", \"passed\": ${passed}, \"margin_pct\": ${margin_pct}}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Calculate actuals
|
||||||
|
SCAN_P95=$(calc_percentile SCAN_TIMES 95)
|
||||||
|
SCAN_P99=$(calc_percentile SCAN_TIMES 99)
|
||||||
|
MEMORY_MAX=$(calc_max MEMORY_VALUES)
|
||||||
|
CPU_MAX=$(calc_max CPU_TIMES)
|
||||||
|
SBOM_P95=$(calc_percentile SBOM_TIMES 95)
|
||||||
|
POLICY_P95=$(calc_percentile POLICY_TIMES 95)
|
||||||
|
|
||||||
|
# Run evaluations
|
||||||
|
SLO_SCAN_P95=$(evaluate_slo "Scan Time P95" "${SLOS[scan_time_p95_ms]}" "${SCAN_P95}" "ms")
|
||||||
|
SLO_SCAN_P99=$(evaluate_slo "Scan Time P99" "${SLOS[scan_time_p99_ms]}" "${SCAN_P99}" "ms")
|
||||||
|
SLO_MEMORY=$(evaluate_slo "Peak Memory" "${SLOS[memory_peak_mb]}" "${MEMORY_MAX}" "MB")
|
||||||
|
SLO_CPU=$(evaluate_slo "CPU Time" "${SLOS[cpu_time_seconds]}" "${CPU_MAX}" "s")
|
||||||
|
SLO_SBOM=$(evaluate_slo "SBOM Generation P95" "${SLOS[sbom_gen_time_ms]}" "${SBOM_P95}" "ms")
|
||||||
|
SLO_POLICY=$(evaluate_slo "Policy Evaluation P95" "${SLOS[policy_eval_time_ms]}" "${POLICY_P95}" "ms")
|
||||||
|
|
||||||
|
# Generate output
|
||||||
|
ALL_PASSED=true
|
||||||
|
if [[ ${#VIOLATIONS[@]} -gt 0 ]]; then
|
||||||
|
ALL_PASSED=false
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Build violations JSON array
|
||||||
|
VIOLATIONS_JSON="[]"
|
||||||
|
if [[ ${#VIOLATIONS[@]} -gt 0 ]]; then
|
||||||
|
VIOLATIONS_JSON="["
|
||||||
|
for i in "${!VIOLATIONS[@]}"; do
|
||||||
|
[[ $i -gt 0 ]] && VIOLATIONS_JSON+=","
|
||||||
|
VIOLATIONS_JSON+="\"${VIOLATIONS[$i]}\""
|
||||||
|
done
|
||||||
|
VIOLATIONS_JSON+="]"
|
||||||
|
fi
|
||||||
|
|
||||||
|
OUTPUT=$(cat <<EOF
|
||||||
|
{
|
||||||
|
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
|
||||||
|
"dry_run": false,
|
||||||
|
"results_path": "${RESULTS_PATH}",
|
||||||
|
"slos_file": "${SLOS_FILE}",
|
||||||
|
"slo_evaluations": {
|
||||||
|
"scan_time_p95": ${SLO_SCAN_P95},
|
||||||
|
"scan_time_p99": ${SLO_SCAN_P99},
|
||||||
|
"memory_peak_mb": ${SLO_MEMORY},
|
||||||
|
"cpu_time_seconds": ${SLO_CPU},
|
||||||
|
"sbom_gen_time_ms": ${SLO_SBOM},
|
||||||
|
"policy_eval_time_ms": ${SLO_POLICY}
|
||||||
|
},
|
||||||
|
"summary": {
|
||||||
|
"total_slos": ${TOTAL_SLOS},
|
||||||
|
"passed": ${PASSED_SLOS},
|
||||||
|
"failed": $((TOTAL_SLOS - PASSED_SLOS)),
|
||||||
|
"all_passed": ${ALL_PASSED},
|
||||||
|
"violations": ${VIOLATIONS_JSON}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
# Output results
|
||||||
|
if [[ -n "${OUTPUT_FILE}" ]]; then
|
||||||
|
echo "${OUTPUT}" > "${OUTPUT_FILE}"
|
||||||
|
log "Results written to ${OUTPUT_FILE}"
|
||||||
|
else
|
||||||
|
echo "${OUTPUT}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Strict mode: fail on violations
|
||||||
|
if [[ "${STRICT}" == "true" ]] && [[ "${ALL_PASSED}" == "false" ]]; then
|
||||||
|
error "Performance SLO violations detected"
|
||||||
|
for v in "${VIOLATIONS[@]}"; do
|
||||||
|
error " - ${v}"
|
||||||
|
done
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit 0
|
||||||
94
.gitea/scripts/metrics/performance-slos.yaml
Normal file
94
.gitea/scripts/metrics/performance-slos.yaml
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
# =============================================================================
|
||||||
|
# Performance SLOs (Service Level Objectives)
|
||||||
|
# Reference: Testing and Quality Guardrails Technical Reference
|
||||||
|
#
|
||||||
|
# These SLOs define the performance budgets for CI quality gates.
|
||||||
|
# Violations will be flagged and may block releases.
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Scan Time SLOs (milliseconds)
|
||||||
|
scan_time:
|
||||||
|
p50:
|
||||||
|
threshold: 15000
|
||||||
|
description: "50th percentile scan time"
|
||||||
|
severity: "info"
|
||||||
|
p95:
|
||||||
|
threshold: 30000
|
||||||
|
description: "95th percentile scan time - primary SLO"
|
||||||
|
severity: "warning"
|
||||||
|
p99:
|
||||||
|
threshold: 60000
|
||||||
|
description: "99th percentile scan time - tail latency"
|
||||||
|
severity: "critical"
|
||||||
|
|
||||||
|
# Memory Usage SLOs (megabytes)
|
||||||
|
memory:
|
||||||
|
peak_mb:
|
||||||
|
threshold: 2048
|
||||||
|
description: "Peak memory usage during scan"
|
||||||
|
severity: "warning"
|
||||||
|
average_mb:
|
||||||
|
threshold: 1024
|
||||||
|
description: "Average memory usage"
|
||||||
|
severity: "info"
|
||||||
|
|
||||||
|
# CPU Time SLOs (seconds)
|
||||||
|
cpu:
|
||||||
|
max_seconds:
|
||||||
|
threshold: 120
|
||||||
|
description: "Maximum CPU time per scan"
|
||||||
|
severity: "warning"
|
||||||
|
average_seconds:
|
||||||
|
threshold: 60
|
||||||
|
description: "Average CPU time per scan"
|
||||||
|
severity: "info"
|
||||||
|
|
||||||
|
# Component-Specific SLOs (milliseconds)
|
||||||
|
components:
|
||||||
|
sbom_generation:
|
||||||
|
p95:
|
||||||
|
threshold: 10000
|
||||||
|
description: "SBOM generation time P95"
|
||||||
|
severity: "warning"
|
||||||
|
policy_evaluation:
|
||||||
|
p95:
|
||||||
|
threshold: 5000
|
||||||
|
description: "Policy evaluation time P95"
|
||||||
|
severity: "warning"
|
||||||
|
reachability_analysis:
|
||||||
|
p95:
|
||||||
|
threshold: 20000
|
||||||
|
description: "Reachability analysis time P95"
|
||||||
|
severity: "warning"
|
||||||
|
vulnerability_matching:
|
||||||
|
p95:
|
||||||
|
threshold: 8000
|
||||||
|
description: "Vulnerability matching time P95"
|
||||||
|
severity: "warning"
|
||||||
|
|
||||||
|
# Resource Budget SLOs
|
||||||
|
resource_budgets:
|
||||||
|
disk_io_mb:
|
||||||
|
threshold: 500
|
||||||
|
description: "Maximum disk I/O per scan"
|
||||||
|
network_calls:
|
||||||
|
threshold: 0
|
||||||
|
description: "Network calls (should be zero for offline scans)"
|
||||||
|
temp_storage_mb:
|
||||||
|
threshold: 1024
|
||||||
|
description: "Maximum temporary storage usage"
|
||||||
|
|
||||||
|
# Regression Thresholds
|
||||||
|
regression:
|
||||||
|
max_degradation_pct: 10
|
||||||
|
warning_threshold_pct: 5
|
||||||
|
baseline_window_days: 30
|
||||||
|
|
||||||
|
# Override Configuration
|
||||||
|
overrides:
|
||||||
|
allowed_labels:
|
||||||
|
- "performance-override"
|
||||||
|
- "large-scan"
|
||||||
|
required_approvers:
|
||||||
|
- "platform"
|
||||||
|
- "performance"
|
||||||
102
.gitea/scripts/metrics/reachability-thresholds.yaml
Normal file
102
.gitea/scripts/metrics/reachability-thresholds.yaml
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
# =============================================================================
|
||||||
|
# Reachability Quality Gate Thresholds
|
||||||
|
# Reference: Testing and Quality Guardrails Technical Reference
|
||||||
|
#
|
||||||
|
# These thresholds are enforced by CI quality gates. Violations will block PRs
|
||||||
|
# unless an override is explicitly approved.
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
thresholds:
|
||||||
|
# Runtime dependency recall: percentage of runtime dependency vulns detected
|
||||||
|
runtime_dependency_recall:
|
||||||
|
min: 0.95
|
||||||
|
description: "Percentage of runtime dependency vulnerabilities detected"
|
||||||
|
severity: "critical"
|
||||||
|
|
||||||
|
# OS package recall: percentage of OS package vulns detected
|
||||||
|
os_package_recall:
|
||||||
|
min: 0.97
|
||||||
|
description: "Percentage of OS package vulnerabilities detected"
|
||||||
|
severity: "critical"
|
||||||
|
|
||||||
|
# Code vulnerability recall: percentage of code-level vulns detected
|
||||||
|
code_vulnerability_recall:
|
||||||
|
min: 0.90
|
||||||
|
description: "Percentage of code vulnerabilities detected"
|
||||||
|
severity: "high"
|
||||||
|
|
||||||
|
# Configuration vulnerability recall
|
||||||
|
config_vulnerability_recall:
|
||||||
|
min: 0.85
|
||||||
|
description: "Percentage of configuration vulnerabilities detected"
|
||||||
|
severity: "medium"
|
||||||
|
|
||||||
|
# False positive rate for unreachable findings
|
||||||
|
unreachable_false_positives:
|
||||||
|
max: 0.05
|
||||||
|
description: "Rate of false positives for unreachable findings"
|
||||||
|
severity: "high"
|
||||||
|
|
||||||
|
# Reachability underreport rate: missed reachable findings
|
||||||
|
reachability_underreport:
|
||||||
|
max: 0.10
|
||||||
|
description: "Rate of reachable findings incorrectly marked unreachable"
|
||||||
|
severity: "critical"
|
||||||
|
|
||||||
|
# Overall precision across all classes
|
||||||
|
overall_precision:
|
||||||
|
min: 0.90
|
||||||
|
description: "Overall precision across all vulnerability classes"
|
||||||
|
severity: "high"
|
||||||
|
|
||||||
|
# F1 score threshold
|
||||||
|
f1_score_min:
|
||||||
|
min: 0.90
|
||||||
|
description: "Minimum F1 score across vulnerability classes"
|
||||||
|
severity: "high"
|
||||||
|
|
||||||
|
# Class-specific thresholds
|
||||||
|
class_thresholds:
|
||||||
|
runtime_dep:
|
||||||
|
recall_min: 0.95
|
||||||
|
precision_min: 0.92
|
||||||
|
f1_min: 0.93
|
||||||
|
|
||||||
|
os_pkg:
|
||||||
|
recall_min: 0.97
|
||||||
|
precision_min: 0.95
|
||||||
|
f1_min: 0.96
|
||||||
|
|
||||||
|
code:
|
||||||
|
recall_min: 0.90
|
||||||
|
precision_min: 0.88
|
||||||
|
f1_min: 0.89
|
||||||
|
|
||||||
|
config:
|
||||||
|
recall_min: 0.85
|
||||||
|
precision_min: 0.80
|
||||||
|
f1_min: 0.82
|
||||||
|
|
||||||
|
# Regression detection settings
|
||||||
|
regression:
|
||||||
|
# Maximum allowed regression from baseline (percentage points)
|
||||||
|
max_recall_regression: 0.02
|
||||||
|
max_precision_regression: 0.03
|
||||||
|
|
||||||
|
# Path to baseline metrics file
|
||||||
|
baseline_path: "bench/baselines/reachability-baseline.json"
|
||||||
|
|
||||||
|
# How many consecutive failures before blocking
|
||||||
|
failure_threshold: 2
|
||||||
|
|
||||||
|
# Override configuration
|
||||||
|
overrides:
|
||||||
|
# Allow temporary bypass for specific PR labels
|
||||||
|
bypass_labels:
|
||||||
|
- "quality-gate-override"
|
||||||
|
- "wip"
|
||||||
|
|
||||||
|
# Require explicit approval from these teams
|
||||||
|
required_approvers:
|
||||||
|
- "platform"
|
||||||
|
- "reachability"
|
||||||
106
.gitea/scripts/sign/sign-authority-gaps.sh
Normal file
106
.gitea/scripts/sign/sign-authority-gaps.sh
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Deterministic DSSE signing helper for Authority gap artefacts (AU1–AU10, RR1–RR10).
|
||||||
|
# Prefers system cosign v3 (bundle) and falls back to repo-pinned v2.6.0.
|
||||||
|
|
||||||
|
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||||
|
COSIGN_BIN="${COSIGN_BIN:-}"
|
||||||
|
|
||||||
|
# Detect cosign binary
|
||||||
|
if [[ -z "$COSIGN_BIN" ]]; then
|
||||||
|
if command -v /usr/local/bin/cosign >/dev/null 2>&1; then
|
||||||
|
COSIGN_BIN="/usr/local/bin/cosign"
|
||||||
|
elif command -v cosign >/dev/null 2>&1; then
|
||||||
|
COSIGN_BIN="$(command -v cosign)"
|
||||||
|
elif [[ -x "$ROOT/tools/cosign/cosign" ]]; then
|
||||||
|
COSIGN_BIN="$ROOT/tools/cosign/cosign"
|
||||||
|
else
|
||||||
|
echo "cosign not found; install or set COSIGN_BIN" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Resolve key
|
||||||
|
TMP_KEY=""
|
||||||
|
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
|
||||||
|
KEY_FILE="$COSIGN_KEY_FILE"
|
||||||
|
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
|
||||||
|
TMP_KEY="$(mktemp)"
|
||||||
|
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$TMP_KEY"
|
||||||
|
chmod 600 "$TMP_KEY"
|
||||||
|
KEY_FILE="$TMP_KEY"
|
||||||
|
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
|
||||||
|
KEY_FILE="$ROOT/tools/cosign/cosign.key"
|
||||||
|
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
|
||||||
|
echo "[warn] Using development key (tools/cosign/cosign.dev.key); NOT for production/Evidence Locker" >&2
|
||||||
|
KEY_FILE="$ROOT/tools/cosign/cosign.dev.key"
|
||||||
|
else
|
||||||
|
echo "No signing key: set COSIGN_PRIVATE_KEY_B64 or COSIGN_KEY_FILE, or place key at tools/cosign/cosign.key" >&2
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
|
||||||
|
OUT_BASE="${OUT_DIR:-$ROOT/docs/modules/authority/gaps/dsse/2025-12-04}"
|
||||||
|
if [[ "$OUT_BASE" != /* ]]; then
|
||||||
|
OUT_BASE="$ROOT/$OUT_BASE"
|
||||||
|
fi
|
||||||
|
mkdir -p "$OUT_BASE"
|
||||||
|
|
||||||
|
ARTEFACTS=(
|
||||||
|
"docs/modules/authority/gaps/artifacts/authority-scope-role-catalog.v1.json|authority-scope-role-catalog"
|
||||||
|
"docs/modules/authority/gaps/artifacts/authority-jwks-metadata.schema.json|authority-jwks-metadata.schema"
|
||||||
|
"docs/modules/authority/gaps/artifacts/crypto-profile-registry.v1.json|crypto-profile-registry"
|
||||||
|
"docs/modules/authority/gaps/artifacts/authority-offline-verifier-bundle.v1.json|authority-offline-verifier-bundle"
|
||||||
|
"docs/modules/authority/gaps/artifacts/authority-abac.schema.json|authority-abac.schema"
|
||||||
|
"docs/modules/authority/gaps/artifacts/rekor-receipt-policy.v1.json|rekor-receipt-policy"
|
||||||
|
"docs/modules/authority/gaps/artifacts/rekor-receipt.schema.json|rekor-receipt.schema"
|
||||||
|
"docs/modules/authority/gaps/artifacts/rekor-receipt-bundle.v1.json|rekor-receipt-bundle"
|
||||||
|
)
|
||||||
|
|
||||||
|
USE_BUNDLE=0
|
||||||
|
if $COSIGN_BIN version --json 2>/dev/null | grep -q '"GitVersion":"v3'; then
|
||||||
|
USE_BUNDLE=1
|
||||||
|
elif $COSIGN_BIN version 2>/dev/null | grep -q 'GitVersion:.*v3\.'; then
|
||||||
|
USE_BUNDLE=1
|
||||||
|
fi
|
||||||
|
|
||||||
|
SHA_FILE="$OUT_BASE/SHA256SUMS"
|
||||||
|
: > "$SHA_FILE"
|
||||||
|
|
||||||
|
for entry in "${ARTEFACTS[@]}"; do
|
||||||
|
IFS="|" read -r path stem <<<"$entry"
|
||||||
|
if [[ ! -f "$ROOT/$path" ]]; then
|
||||||
|
echo "Missing artefact: $path" >&2
|
||||||
|
exit 3
|
||||||
|
fi
|
||||||
|
if (( USE_BUNDLE )); then
|
||||||
|
bundle="$OUT_BASE/${stem}.sigstore.json"
|
||||||
|
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
|
||||||
|
"$COSIGN_BIN" sign-blob \
|
||||||
|
--key "$KEY_FILE" \
|
||||||
|
--yes \
|
||||||
|
--tlog-upload=false \
|
||||||
|
--bundle "$bundle" \
|
||||||
|
"$ROOT/$path"
|
||||||
|
printf "%s %s\n" "$(sha256sum "$bundle" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$bundle")" >> "$SHA_FILE"
|
||||||
|
else
|
||||||
|
sig="$OUT_BASE/${stem}.dsse"
|
||||||
|
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
|
||||||
|
"$COSIGN_BIN" sign-blob \
|
||||||
|
--key "$KEY_FILE" \
|
||||||
|
--yes \
|
||||||
|
--tlog-upload=false \
|
||||||
|
--output-signature "$sig" \
|
||||||
|
"$ROOT/$path"
|
||||||
|
printf "%s %s\n" "$(sha256sum "$sig" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$sig")" >> "$SHA_FILE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf "%s %s\n" "$(sha256sum "$ROOT/$path" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$ROOT/$path")" >> "$SHA_FILE"
|
||||||
|
echo "Signed $path"
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "Signed artefacts written to $OUT_BASE"
|
||||||
|
|
||||||
|
if [[ -n "$TMP_KEY" ]]; then
|
||||||
|
rm -f "$TMP_KEY"
|
||||||
|
fi
|
||||||
50
.gitea/scripts/sign/sign-policy.sh
Normal file
50
.gitea/scripts/sign/sign-policy.sh
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
# Signs a policy file with cosign and verifies it. Intended for CI and offline use.
|
||||||
|
# Requires COSIGN_KEY_B64 (private key PEM base64) or KMS envs; optional COSIGN_PASSWORD.
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat <<'USAGE'
|
||||||
|
Usage: sign-policy.sh --file <path> [--out-dir out/policy-sign]
|
||||||
|
Env:
|
||||||
|
COSIGN_KEY_B64 base64-encoded PEM private key (if not using KMS)
|
||||||
|
COSIGN_PASSWORD passphrase for the key (can be empty for test keys)
|
||||||
|
COSIGN_PUBLIC_KEY_PATH optional path to write public key for verify step
|
||||||
|
USAGE
|
||||||
|
}
|
||||||
|
|
||||||
|
FILE=""
|
||||||
|
OUT_DIR="out/policy-sign"
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--file) FILE="$2"; shift 2;;
|
||||||
|
--out-dir) OUT_DIR="$2"; shift 2;;
|
||||||
|
-h|--help) usage; exit 0;;
|
||||||
|
*) echo "Unknown arg: $1" >&2; usage; exit 1;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ -z "$FILE" ]]; then echo "--file is required" >&2; exit 1; fi
|
||||||
|
if [[ ! -f "$FILE" ]]; then echo "file not found: $FILE" >&2; exit 1; fi
|
||||||
|
|
||||||
|
mkdir -p "$OUT_DIR"
|
||||||
|
BASENAME=$(basename "$FILE")
|
||||||
|
SIG="$OUT_DIR/${BASENAME}.sig"
|
||||||
|
PUB_OUT="${COSIGN_PUBLIC_KEY_PATH:-$OUT_DIR/cosign.pub}"
|
||||||
|
|
||||||
|
if [[ -n "${COSIGN_KEY_B64:-}" ]]; then
|
||||||
|
KEYFILE="$OUT_DIR/cosign.key"
|
||||||
|
printf "%s" "$COSIGN_KEY_B64" | base64 -d > "$KEYFILE"
|
||||||
|
chmod 600 "$KEYFILE"
|
||||||
|
export COSIGN_KEY="$KEYFILE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
export COSIGN_PASSWORD=${COSIGN_PASSWORD:-}
|
||||||
|
cosign version >/dev/null
|
||||||
|
|
||||||
|
cosign sign-blob "$FILE" --output-signature "$SIG"
|
||||||
|
cosign public-key --key "$COSIGN_KEY" > "$PUB_OUT"
|
||||||
|
cosign verify-blob --key "$PUB_OUT" --signature "$SIG" "$FILE"
|
||||||
|
|
||||||
|
printf "Signed %s -> %s\nPublic key -> %s\n" "$FILE" "$SIG" "$PUB_OUT"
|
||||||
106
.gitea/scripts/sign/sign-signals.sh
Normal file
106
.gitea/scripts/sign/sign-signals.sh
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Deterministic DSSE signing helper for Signals artifacts.
|
||||||
|
# Prefers system cosign v3 (bundle) and falls back to repo-pinned v2.6.0.
|
||||||
|
|
||||||
|
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||||
|
COSIGN_BIN="${COSIGN_BIN:-}"
|
||||||
|
|
||||||
|
# Detect cosign binary (v3 preferred).
|
||||||
|
if [[ -z "$COSIGN_BIN" ]]; then
|
||||||
|
if command -v /usr/local/bin/cosign >/dev/null 2>&1; then
|
||||||
|
COSIGN_BIN="/usr/local/bin/cosign"
|
||||||
|
elif command -v cosign >/dev/null 2>&1; then
|
||||||
|
COSIGN_BIN="$(command -v cosign)"
|
||||||
|
elif [[ -x "$ROOT/tools/cosign/cosign" ]]; then
|
||||||
|
COSIGN_BIN="$ROOT/tools/cosign/cosign"
|
||||||
|
else
|
||||||
|
echo "cosign not found; install or set COSIGN_BIN" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Resolve key
|
||||||
|
TMP_KEY=""
|
||||||
|
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
|
||||||
|
KEY_FILE="$COSIGN_KEY_FILE"
|
||||||
|
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
|
||||||
|
TMP_KEY="$(mktemp)"
|
||||||
|
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$TMP_KEY"
|
||||||
|
chmod 600 "$TMP_KEY"
|
||||||
|
KEY_FILE="$TMP_KEY"
|
||||||
|
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
|
||||||
|
KEY_FILE="$ROOT/tools/cosign/cosign.key"
|
||||||
|
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
|
||||||
|
echo "[warn] Using development key (tools/cosign/cosign.dev.key); NOT for production/Evidence Locker" >&2
|
||||||
|
KEY_FILE="$ROOT/tools/cosign/cosign.dev.key"
|
||||||
|
else
|
||||||
|
echo "No signing key: set COSIGN_PRIVATE_KEY_B64 or COSIGN_KEY_FILE, or place key at tools/cosign/cosign.key" >&2
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
|
||||||
|
OUT_BASE="${OUT_DIR:-$ROOT/evidence-locker/signals/2025-12-01}"
|
||||||
|
# Normalize OUT_BASE to absolute to avoid pushd-relative path issues.
|
||||||
|
if [[ "$OUT_BASE" != /* ]]; then
|
||||||
|
OUT_BASE="$ROOT/$OUT_BASE"
|
||||||
|
fi
|
||||||
|
mkdir -p "$OUT_BASE"
|
||||||
|
|
||||||
|
ARTIFACTS=(
|
||||||
|
"decay/confidence_decay_config.yaml|stella.ops/confidenceDecayConfig@v1|confidence_decay_config"
|
||||||
|
"unknowns/unknowns_scoring_manifest.json|stella.ops/unknownsScoringManifest@v1|unknowns_scoring_manifest"
|
||||||
|
"heuristics/heuristics.catalog.json|stella.ops/heuristicCatalog@v1|heuristics_catalog"
|
||||||
|
)
|
||||||
|
|
||||||
|
USE_BUNDLE=0
|
||||||
|
if $COSIGN_BIN version --json 2>/dev/null | grep -q '"GitVersion":"v3'; then
|
||||||
|
USE_BUNDLE=1
|
||||||
|
elif $COSIGN_BIN version 2>/dev/null | grep -q 'GitVersion:.*v3\.'; then
|
||||||
|
USE_BUNDLE=1
|
||||||
|
fi
|
||||||
|
|
||||||
|
pushd "$ROOT/docs/modules/signals" >/dev/null
|
||||||
|
|
||||||
|
SHA_FILE="$OUT_BASE/SHA256SUMS"
|
||||||
|
: > "$SHA_FILE"
|
||||||
|
|
||||||
|
for entry in "${ARTIFACTS[@]}"; do
|
||||||
|
IFS="|" read -r path predicate stem <<<"$entry"
|
||||||
|
if [[ ! -f "$path" ]]; then
|
||||||
|
echo "Missing artifact: $path" >&2
|
||||||
|
exit 3
|
||||||
|
fi
|
||||||
|
|
||||||
|
if (( USE_BUNDLE )); then
|
||||||
|
bundle="$OUT_BASE/${stem}.sigstore.json"
|
||||||
|
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
|
||||||
|
"$COSIGN_BIN" sign-blob \
|
||||||
|
--key "$KEY_FILE" \
|
||||||
|
--yes \
|
||||||
|
--tlog-upload=false \
|
||||||
|
--bundle "$bundle" \
|
||||||
|
"$path"
|
||||||
|
printf "%s %s\n" "$(sha256sum "$bundle" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$bundle")" >> "$SHA_FILE"
|
||||||
|
else
|
||||||
|
sig="$OUT_BASE/${stem}.dsse"
|
||||||
|
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
|
||||||
|
"$COSIGN_BIN" sign-blob \
|
||||||
|
--key "$KEY_FILE" \
|
||||||
|
--yes \
|
||||||
|
--tlog-upload=false \
|
||||||
|
--output-signature "$sig" \
|
||||||
|
"$path"
|
||||||
|
printf "%s %s\n" "$(sha256sum "$sig" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$sig")" >> "$SHA_FILE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf "%s %s\n" "$(sha256sum "$path" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$path")" >> "$SHA_FILE"
|
||||||
|
done
|
||||||
|
|
||||||
|
popd >/dev/null
|
||||||
|
|
||||||
|
echo "Signed artifacts written to $OUT_BASE"
|
||||||
|
|
||||||
|
if [[ -n "$TMP_KEY" ]]; then
|
||||||
|
rm -f "$TMP_KEY"
|
||||||
|
fi
|
||||||
22
.gitea/scripts/test/determinism-run.sh
Normal file
22
.gitea/scripts/test/determinism-run.sh
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# DEVOPS-SCAN-90-004: run determinism harness/tests and collect report
|
||||||
|
|
||||||
|
ROOT="$(git rev-parse --show-toplevel)"
|
||||||
|
OUT="${ROOT}/out/scanner-determinism"
|
||||||
|
mkdir -p "$OUT"
|
||||||
|
|
||||||
|
PROJECT="src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj"
|
||||||
|
|
||||||
|
echo "[determinism] running dotnet test (filter=Determinism)"
|
||||||
|
dotnet test "$PROJECT" --no-build --logger "trx;LogFileName=determinism.trx" --filter Determinism
|
||||||
|
|
||||||
|
find "$(dirname "$PROJECT")" -name "*.trx" -print -exec cp {} "$OUT/" \;
|
||||||
|
|
||||||
|
echo "[determinism] summarizing"
|
||||||
|
printf "project=%s\n" "$PROJECT" > "$OUT/summary.txt"
|
||||||
|
printf "timestamp=%s\n" "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> "$OUT/summary.txt"
|
||||||
|
|
||||||
|
tar -C "$OUT" -czf "$OUT/determinism-artifacts.tgz" .
|
||||||
|
echo "[determinism] artifacts at $OUT"
|
||||||
7
.gitea/scripts/test/run-fixtures-check.sh
Normal file
7
.gitea/scripts/test/run-fixtures-check.sh
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
root_dir=$(cd "$(dirname "$0")/.." && pwd)
|
||||||
|
verifier="$root_dir/packs/verify_offline_bundle.py"
|
||||||
|
python3 "$verifier" --bundle "$root_dir/packs/__fixtures__/good" --manifest bundle.json --require-dsse
|
||||||
|
python3 "$verifier" --bundle "$root_dir/packs/__fixtures__/bad" --manifest bundle-missing-quota.json --require-dsse && exit 1 || true
|
||||||
|
echo "fixture checks completed"
|
||||||
16
.gitea/scripts/util/cleanup-runner-space.sh
Normal file
16
.gitea/scripts/util/cleanup-runner-space.sh
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Safe-ish workspace cleanup when the runner hits “No space left on device”.
|
||||||
|
# Deletes build/test outputs that are regenerated; preserves offline caches and sources.
|
||||||
|
set -euo pipefail
|
||||||
|
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||||
|
|
||||||
|
echo "Cleaning workspace outputs under: ${ROOT}"
|
||||||
|
|
||||||
|
rm -rf "${ROOT}/TestResults" || true
|
||||||
|
rm -rf "${ROOT}/out" || true
|
||||||
|
rm -rf "${ROOT}/artifacts" || true
|
||||||
|
|
||||||
|
# Trim common temp locations if they exist in repo workspace
|
||||||
|
[ -d "${ROOT}/tmp" ] && find "${ROOT}/tmp" -mindepth 1 -maxdepth 1 -exec rm -rf {} +
|
||||||
|
|
||||||
|
echo "Done. Consider also clearing any runner-level /tmp outside the workspace if safe."
|
||||||
27
.gitea/scripts/util/dotnet-filter.sh
Normal file
27
.gitea/scripts/util/dotnet-filter.sh
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Thin wrapper to strip the harness-injected "workdir:" switch that breaks dotnet/msbuild parsing.
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
real_dotnet="$(command -v dotnet)"
|
||||||
|
if [[ -z "${real_dotnet}" ]]; then
|
||||||
|
echo "dotnet executable not found in PATH" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
filtered_args=()
|
||||||
|
for arg in "$@"; do
|
||||||
|
# Drop any argument that is exactly or contains the injected workdir switch.
|
||||||
|
if [[ "${arg}" == *"workdir:"* ]]; then
|
||||||
|
# If the arg also contains other comma-separated parts, keep the non-workdir pieces.
|
||||||
|
IFS=',' read -r -a parts <<< "${arg}"
|
||||||
|
for part in "${parts[@]}"; do
|
||||||
|
[[ "${part}" == *"workdir:"* || -z "${part}" ]] && continue
|
||||||
|
filtered_args+=("${part}")
|
||||||
|
done
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
filtered_args+=("${arg}")
|
||||||
|
done
|
||||||
|
|
||||||
|
exec "${real_dotnet}" "${filtered_args[@]}"
|
||||||
26
.gitea/scripts/util/enable-openssl11-shim.sh
Normal file
26
.gitea/scripts/util/enable-openssl11-shim.sh
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Ensures OpenSSL 1.1 shim is discoverable for Mongo2Go by exporting LD_LIBRARY_PATH.
|
||||||
|
# Safe for repeated invocation; respects STELLAOPS_OPENSSL11_SHIM override.
|
||||||
|
|
||||||
|
ROOT=${STELLAOPS_REPO_ROOT:-$(git rev-parse --show-toplevel 2>/dev/null || pwd)}
|
||||||
|
SHIM_DIR=${STELLAOPS_OPENSSL11_SHIM:-"${ROOT}/src/__Tests/native/openssl-1.1/linux-x64"}
|
||||||
|
|
||||||
|
if [[ ! -d "${SHIM_DIR}" ]]; then
|
||||||
|
echo "::warning ::OpenSSL 1.1 shim directory not found at ${SHIM_DIR}; Mongo2Go tests may fail" >&2
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
export LD_LIBRARY_PATH="${SHIM_DIR}:${LD_LIBRARY_PATH:-}"
|
||||||
|
export STELLAOPS_OPENSSL11_SHIM="${SHIM_DIR}"
|
||||||
|
|
||||||
|
# Persist for subsequent CI steps when available
|
||||||
|
if [[ -n "${GITHUB_ENV:-}" ]]; then
|
||||||
|
{
|
||||||
|
echo "LD_LIBRARY_PATH=${LD_LIBRARY_PATH}"
|
||||||
|
echo "STELLAOPS_OPENSSL11_SHIM=${STELLAOPS_OPENSSL11_SHIM}"
|
||||||
|
} >> "${GITHUB_ENV}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "OpenSSL 1.1 shim enabled (LD_LIBRARY_PATH=${LD_LIBRARY_PATH})"
|
||||||
53
.gitea/scripts/validate/validate-compose.sh
Normal file
53
.gitea/scripts/validate/validate-compose.sh
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# validate-compose.sh - Validate all Docker Compose profiles
|
||||||
|
# Used by CI/CD pipelines to ensure Compose configurations are valid
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
|
||||||
|
COMPOSE_DIR="${REPO_ROOT}/devops/compose"
|
||||||
|
|
||||||
|
# Default profiles to validate
|
||||||
|
PROFILES=(dev stage prod airgap mirror)
|
||||||
|
|
||||||
|
echo "=== Docker Compose Validation ==="
|
||||||
|
echo "Compose directory: $COMPOSE_DIR"
|
||||||
|
|
||||||
|
# Check if compose directory exists
|
||||||
|
if [[ ! -d "$COMPOSE_DIR" ]]; then
|
||||||
|
echo "::warning::Compose directory not found at $COMPOSE_DIR"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for base docker-compose.yml
|
||||||
|
BASE_COMPOSE="$COMPOSE_DIR/docker-compose.yml"
|
||||||
|
if [[ ! -f "$BASE_COMPOSE" ]]; then
|
||||||
|
echo "::warning::Base docker-compose.yml not found at $BASE_COMPOSE"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
FAILED=0
|
||||||
|
|
||||||
|
for profile in "${PROFILES[@]}"; do
|
||||||
|
OVERLAY="$COMPOSE_DIR/docker-compose.$profile.yml"
|
||||||
|
|
||||||
|
if [[ -f "$OVERLAY" ]]; then
|
||||||
|
echo "=== Validating docker-compose.$profile.yml ==="
|
||||||
|
if docker compose -f "$BASE_COMPOSE" -f "$OVERLAY" config --quiet 2>&1; then
|
||||||
|
echo "✓ Profile '$profile' is valid"
|
||||||
|
else
|
||||||
|
echo "✗ Profile '$profile' validation failed"
|
||||||
|
FAILED=1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "⊘ Skipping profile '$profile' (no overlay file)"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ $FAILED -eq 1 ]]; then
|
||||||
|
echo "::error::One or more Compose profiles failed validation"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "=== All Compose profiles valid! ==="
|
||||||
59
.gitea/scripts/validate/validate-helm.sh
Normal file
59
.gitea/scripts/validate/validate-helm.sh
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# validate-helm.sh - Validate Helm charts
|
||||||
|
# Used by CI/CD pipelines to ensure Helm charts are valid
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
|
||||||
|
HELM_DIR="${REPO_ROOT}/devops/helm"
|
||||||
|
|
||||||
|
echo "=== Helm Chart Validation ==="
|
||||||
|
echo "Helm directory: $HELM_DIR"
|
||||||
|
|
||||||
|
# Check if helm is installed
|
||||||
|
if ! command -v helm &>/dev/null; then
|
||||||
|
echo "::error::Helm is not installed"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if helm directory exists
|
||||||
|
if [[ ! -d "$HELM_DIR" ]]; then
|
||||||
|
echo "::warning::Helm directory not found at $HELM_DIR"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
FAILED=0
|
||||||
|
|
||||||
|
# Find all Chart.yaml files (indicates a Helm chart)
|
||||||
|
while IFS= read -r -d '' chart_file; do
|
||||||
|
chart_dir="$(dirname "$chart_file")"
|
||||||
|
chart_name="$(basename "$chart_dir")"
|
||||||
|
|
||||||
|
echo "=== Validating chart: $chart_name ==="
|
||||||
|
|
||||||
|
# Lint the chart
|
||||||
|
if helm lint "$chart_dir" 2>&1; then
|
||||||
|
echo "✓ Chart '$chart_name' lint passed"
|
||||||
|
else
|
||||||
|
echo "✗ Chart '$chart_name' lint failed"
|
||||||
|
FAILED=1
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Template the chart (dry-run)
|
||||||
|
if helm template "$chart_name" "$chart_dir" --debug >/dev/null 2>&1; then
|
||||||
|
echo "✓ Chart '$chart_name' template succeeded"
|
||||||
|
else
|
||||||
|
echo "✗ Chart '$chart_name' template failed"
|
||||||
|
FAILED=1
|
||||||
|
fi
|
||||||
|
|
||||||
|
done < <(find "$HELM_DIR" -name "Chart.yaml" -print0)
|
||||||
|
|
||||||
|
if [[ $FAILED -eq 1 ]]; then
|
||||||
|
echo "::error::One or more Helm charts failed validation"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "=== All Helm charts valid! ==="
|
||||||
201
.gitea/scripts/validate/validate-licenses.sh
Normal file
201
.gitea/scripts/validate/validate-licenses.sh
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# License validation script for StellaOps CI
|
||||||
|
# Usage: validate-licenses.sh <type> <input-file>
|
||||||
|
# type: nuget | npm
|
||||||
|
# input-file: Path to package list or license-checker output
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# SPDX identifiers for licenses compatible with AGPL-3.0-or-later
|
||||||
|
ALLOWED_LICENSES=(
|
||||||
|
"MIT"
|
||||||
|
"Apache-2.0"
|
||||||
|
"Apache 2.0"
|
||||||
|
"BSD-2-Clause"
|
||||||
|
"BSD-3-Clause"
|
||||||
|
"BSD"
|
||||||
|
"ISC"
|
||||||
|
"0BSD"
|
||||||
|
"CC0-1.0"
|
||||||
|
"CC0"
|
||||||
|
"Unlicense"
|
||||||
|
"PostgreSQL"
|
||||||
|
"MPL-2.0"
|
||||||
|
"MPL 2.0"
|
||||||
|
"LGPL-2.1-or-later"
|
||||||
|
"LGPL-3.0-or-later"
|
||||||
|
"GPL-3.0-or-later"
|
||||||
|
"AGPL-3.0-or-later"
|
||||||
|
"Zlib"
|
||||||
|
"WTFPL"
|
||||||
|
"BlueOak-1.0.0"
|
||||||
|
"Python-2.0"
|
||||||
|
"(MIT OR Apache-2.0)"
|
||||||
|
"(Apache-2.0 OR MIT)"
|
||||||
|
"MIT OR Apache-2.0"
|
||||||
|
"Apache-2.0 OR MIT"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Licenses that are OK but should be noted
|
||||||
|
CONDITIONAL_LICENSES=(
|
||||||
|
"MPL-2.0"
|
||||||
|
"LGPL-2.1-or-later"
|
||||||
|
"LGPL-3.0-or-later"
|
||||||
|
"CC-BY-4.0"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Licenses that are NOT compatible with AGPL-3.0-or-later
|
||||||
|
BLOCKED_LICENSES=(
|
||||||
|
"GPL-2.0-only"
|
||||||
|
"SSPL-1.0"
|
||||||
|
"SSPL"
|
||||||
|
"BUSL-1.1"
|
||||||
|
"BSL-1.0"
|
||||||
|
"Commons Clause"
|
||||||
|
"Proprietary"
|
||||||
|
"Commercial"
|
||||||
|
"UNLICENSED"
|
||||||
|
)
|
||||||
|
|
||||||
|
TYPE="${1:-}"
|
||||||
|
INPUT="${2:-}"
|
||||||
|
|
||||||
|
if [[ -z "$TYPE" || -z "$INPUT" ]]; then
|
||||||
|
echo "Usage: $0 <nuget|npm> <input-file>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -f "$INPUT" ]]; then
|
||||||
|
echo "ERROR: Input file not found: $INPUT"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "=== StellaOps License Validation ==="
|
||||||
|
echo "Type: $TYPE"
|
||||||
|
echo "Input: $INPUT"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
found_blocked=0
|
||||||
|
found_conditional=0
|
||||||
|
found_unknown=0
|
||||||
|
|
||||||
|
validate_npm() {
|
||||||
|
local input="$1"
|
||||||
|
|
||||||
|
echo "Validating npm licenses..."
|
||||||
|
|
||||||
|
# Extract licenses from license-checker JSON output
|
||||||
|
if command -v jq &> /dev/null; then
|
||||||
|
jq -r 'to_entries[] | "\(.key): \(.value.licenses)"' "$input" 2>/dev/null | while read -r line; do
|
||||||
|
pkg=$(echo "$line" | cut -d: -f1)
|
||||||
|
license=$(echo "$line" | cut -d: -f2- | xargs)
|
||||||
|
|
||||||
|
# Check if license is blocked
|
||||||
|
for blocked in "${BLOCKED_LICENSES[@]}"; do
|
||||||
|
if [[ "$license" == *"$blocked"* ]]; then
|
||||||
|
echo "BLOCKED: $pkg uses '$license'"
|
||||||
|
found_blocked=$((found_blocked + 1))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Check if license is allowed
|
||||||
|
allowed=0
|
||||||
|
for ok_license in "${ALLOWED_LICENSES[@]}"; do
|
||||||
|
if [[ "$license" == *"$ok_license"* ]]; then
|
||||||
|
allowed=1
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ $allowed -eq 0 ]]; then
|
||||||
|
echo "UNKNOWN: $pkg uses '$license'"
|
||||||
|
found_unknown=$((found_unknown + 1))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
else
|
||||||
|
echo "WARNING: jq not available, performing basic grep check"
|
||||||
|
for blocked in "${BLOCKED_LICENSES[@]}"; do
|
||||||
|
if grep -qi "$blocked" "$input"; then
|
||||||
|
echo "BLOCKED: Found potentially blocked license: $blocked"
|
||||||
|
found_blocked=$((found_blocked + 1))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_nuget() {
|
||||||
|
local input="$1"
|
||||||
|
|
||||||
|
echo "Validating NuGet licenses..."
|
||||||
|
|
||||||
|
# NuGet package list doesn't include licenses directly
|
||||||
|
# We check for known problematic packages
|
||||||
|
|
||||||
|
# Known packages with compatible licenses (allowlist approach for critical packages)
|
||||||
|
known_good_patterns=(
|
||||||
|
"Microsoft."
|
||||||
|
"System."
|
||||||
|
"Newtonsoft.Json"
|
||||||
|
"Serilog"
|
||||||
|
"BouncyCastle"
|
||||||
|
"Npgsql"
|
||||||
|
"Dapper"
|
||||||
|
"Polly"
|
||||||
|
"xunit"
|
||||||
|
"Moq"
|
||||||
|
"FluentAssertions"
|
||||||
|
"CycloneDX"
|
||||||
|
"YamlDotNet"
|
||||||
|
"StackExchange.Redis"
|
||||||
|
"Google."
|
||||||
|
"AWSSDK."
|
||||||
|
"Grpc."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check if any packages don't match known patterns
|
||||||
|
echo "Checking for unknown packages..."
|
||||||
|
|
||||||
|
# This is informational - we trust the allowlist in THIRD-PARTY-DEPENDENCIES.md
|
||||||
|
echo "OK: NuGet validation relies on documented license allowlist"
|
||||||
|
echo "See: docs/legal/THIRD-PARTY-DEPENDENCIES.md"
|
||||||
|
}
|
||||||
|
|
||||||
|
case "$TYPE" in
|
||||||
|
npm)
|
||||||
|
validate_npm "$INPUT"
|
||||||
|
;;
|
||||||
|
nuget)
|
||||||
|
validate_nuget "$INPUT"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "ERROR: Unknown type: $TYPE"
|
||||||
|
echo "Supported types: nuget, npm"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Validation Summary ==="
|
||||||
|
echo "Blocked licenses found: $found_blocked"
|
||||||
|
echo "Conditional licenses found: $found_conditional"
|
||||||
|
echo "Unknown licenses found: $found_unknown"
|
||||||
|
|
||||||
|
if [[ $found_blocked -gt 0 ]]; then
|
||||||
|
echo ""
|
||||||
|
echo "ERROR: Blocked licenses detected!"
|
||||||
|
echo "These licenses are NOT compatible with AGPL-3.0-or-later"
|
||||||
|
echo "Please remove or replace the affected packages"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ $found_unknown -gt 0 ]]; then
|
||||||
|
echo ""
|
||||||
|
echo "WARNING: Unknown licenses detected"
|
||||||
|
echo "Please review and add to allowlist if compatible"
|
||||||
|
echo "See: docs/legal/LICENSE-COMPATIBILITY.md"
|
||||||
|
# Don't fail on unknown - just warn
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "License validation: PASSED"
|
||||||
|
exit 0
|
||||||
260
.gitea/scripts/validate/validate-migrations.sh
Normal file
260
.gitea/scripts/validate/validate-migrations.sh
Normal file
@@ -0,0 +1,260 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Migration Validation Script
|
||||||
|
# Validates migration naming conventions, detects duplicates, and checks for issues.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./validate-migrations.sh [--strict] [--fix-scanner]
|
||||||
|
#
|
||||||
|
# Options:
|
||||||
|
# --strict Exit with error on any warning
|
||||||
|
# --fix-scanner Generate rename commands for Scanner duplicates
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
|
||||||
|
|
||||||
|
STRICT_MODE=false
|
||||||
|
FIX_SCANNER=false
|
||||||
|
EXIT_CODE=0
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
for arg in "$@"; do
|
||||||
|
case $arg in
|
||||||
|
--strict)
|
||||||
|
STRICT_MODE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--fix-scanner)
|
||||||
|
FIX_SCANNER=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "=== Migration Validation ==="
|
||||||
|
echo "Repository: $REPO_ROOT"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
# Track issues
|
||||||
|
ERRORS=()
|
||||||
|
WARNINGS=()
|
||||||
|
|
||||||
|
# Function to check for duplicates in a directory
|
||||||
|
check_duplicates() {
|
||||||
|
local dir="$1"
|
||||||
|
local module="$2"
|
||||||
|
|
||||||
|
if [ ! -d "$dir" ]; then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Extract numeric prefixes and find duplicates
|
||||||
|
local duplicates
|
||||||
|
duplicates=$(find "$dir" -maxdepth 1 -name "*.sql" -printf "%f\n" 2>/dev/null | \
|
||||||
|
sed -E 's/^([0-9]+)_.*/\1/' | \
|
||||||
|
sort | uniq -d)
|
||||||
|
|
||||||
|
if [ -n "$duplicates" ]; then
|
||||||
|
for prefix in $duplicates; do
|
||||||
|
local files
|
||||||
|
files=$(find "$dir" -maxdepth 1 -name "${prefix}_*.sql" -printf "%f\n" | tr '\n' ', ' | sed 's/,$//')
|
||||||
|
ERRORS+=("[$module] Duplicate prefix $prefix: $files")
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to check naming convention
|
||||||
|
check_naming() {
|
||||||
|
local dir="$1"
|
||||||
|
local module="$2"
|
||||||
|
|
||||||
|
if [ ! -d "$dir" ]; then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
find "$dir" -maxdepth 1 -name "*.sql" -printf "%f\n" 2>/dev/null | while read -r file; do
|
||||||
|
# Check standard pattern: NNN_description.sql
|
||||||
|
if [[ "$file" =~ ^[0-9]{3}_[a-z0-9_]+\.sql$ ]]; then
|
||||||
|
continue # Valid standard
|
||||||
|
fi
|
||||||
|
# Check seed pattern: SNNN_description.sql
|
||||||
|
if [[ "$file" =~ ^S[0-9]{3}_[a-z0-9_]+\.sql$ ]]; then
|
||||||
|
continue # Valid seed
|
||||||
|
fi
|
||||||
|
# Check data migration pattern: DMNNN_description.sql
|
||||||
|
if [[ "$file" =~ ^DM[0-9]{3}_[a-z0-9_]+\.sql$ ]]; then
|
||||||
|
continue # Valid data migration
|
||||||
|
fi
|
||||||
|
# Check for Flyway-style
|
||||||
|
if [[ "$file" =~ ^V[0-9]+.*\.sql$ ]]; then
|
||||||
|
WARNINGS+=("[$module] Flyway-style naming: $file (consider NNN_description.sql)")
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
# Check for EF Core timestamp style
|
||||||
|
if [[ "$file" =~ ^[0-9]{14,}_.*\.sql$ ]]; then
|
||||||
|
WARNINGS+=("[$module] EF Core timestamp naming: $file (consider NNN_description.sql)")
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
# Check for 4-digit prefix
|
||||||
|
if [[ "$file" =~ ^[0-9]{4}_.*\.sql$ ]]; then
|
||||||
|
WARNINGS+=("[$module] 4-digit prefix: $file (standard is 3-digit NNN_description.sql)")
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
# Non-standard
|
||||||
|
WARNINGS+=("[$module] Non-standard naming: $file")
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to check for dangerous operations in startup migrations
|
||||||
|
check_dangerous_ops() {
|
||||||
|
local dir="$1"
|
||||||
|
local module="$2"
|
||||||
|
|
||||||
|
if [ ! -d "$dir" ]; then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
find "$dir" -maxdepth 1 -name "*.sql" -printf "%f\n" 2>/dev/null | while read -r file; do
|
||||||
|
local filepath="$dir/$file"
|
||||||
|
local prefix
|
||||||
|
prefix=$(echo "$file" | sed -E 's/^([0-9]+)_.*/\1/')
|
||||||
|
|
||||||
|
# Only check startup migrations (001-099)
|
||||||
|
if [[ "$prefix" =~ ^0[0-9]{2}$ ]] && [ "$prefix" -lt 100 ]; then
|
||||||
|
# Check for DROP TABLE without IF EXISTS
|
||||||
|
if grep -qE "DROP\s+TABLE\s+(?!IF\s+EXISTS)" "$filepath" 2>/dev/null; then
|
||||||
|
ERRORS+=("[$module] $file: DROP TABLE without IF EXISTS in startup migration")
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for DROP COLUMN (breaking change in startup)
|
||||||
|
if grep -qiE "ALTER\s+TABLE.*DROP\s+COLUMN" "$filepath" 2>/dev/null; then
|
||||||
|
ERRORS+=("[$module] $file: DROP COLUMN in startup migration (should be release migration 100+)")
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for TRUNCATE
|
||||||
|
if grep -qiE "^\s*TRUNCATE" "$filepath" 2>/dev/null; then
|
||||||
|
ERRORS+=("[$module] $file: TRUNCATE in startup migration")
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
# Scan all module migration directories
|
||||||
|
echo "Scanning migration directories..."
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Define module migration paths
|
||||||
|
declare -A MIGRATION_PATHS
|
||||||
|
MIGRATION_PATHS=(
|
||||||
|
["Authority"]="src/Authority/__Libraries/StellaOps.Authority.Storage.Postgres/Migrations"
|
||||||
|
["Concelier"]="src/Concelier/__Libraries/StellaOps.Concelier.Storage.Postgres/Migrations"
|
||||||
|
["Excititor"]="src/Excititor/__Libraries/StellaOps.Excititor.Storage.Postgres/Migrations"
|
||||||
|
["Policy"]="src/Policy/__Libraries/StellaOps.Policy.Storage.Postgres/Migrations"
|
||||||
|
["Scheduler"]="src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Postgres/Migrations"
|
||||||
|
["Notify"]="src/Notify/__Libraries/StellaOps.Notify.Storage.Postgres/Migrations"
|
||||||
|
["Scanner"]="src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations"
|
||||||
|
["Scanner.Triage"]="src/Scanner/__Libraries/StellaOps.Scanner.Triage/Migrations"
|
||||||
|
["Attestor"]="src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Migrations"
|
||||||
|
["Signer"]="src/Signer/__Libraries/StellaOps.Signer.KeyManagement/Migrations"
|
||||||
|
["Signals"]="src/Signals/StellaOps.Signals.Storage.Postgres/Migrations"
|
||||||
|
["EvidenceLocker"]="src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Db/Migrations"
|
||||||
|
["ExportCenter"]="src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Db/Migrations"
|
||||||
|
["IssuerDirectory"]="src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Storage.Postgres/Migrations"
|
||||||
|
["Orchestrator"]="src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/migrations"
|
||||||
|
["TimelineIndexer"]="src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Db/Migrations"
|
||||||
|
["BinaryIndex"]="src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Persistence/Migrations"
|
||||||
|
["Unknowns"]="src/Unknowns/__Libraries/StellaOps.Unknowns.Storage.Postgres/Migrations"
|
||||||
|
["VexHub"]="src/VexHub/__Libraries/StellaOps.VexHub.Storage.Postgres/Migrations"
|
||||||
|
)
|
||||||
|
|
||||||
|
for module in "${!MIGRATION_PATHS[@]}"; do
|
||||||
|
path="$REPO_ROOT/${MIGRATION_PATHS[$module]}"
|
||||||
|
if [ -d "$path" ]; then
|
||||||
|
echo "Checking: $module"
|
||||||
|
check_duplicates "$path" "$module"
|
||||||
|
check_naming "$path" "$module"
|
||||||
|
check_dangerous_ops "$path" "$module"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Report errors
|
||||||
|
if [ ${#ERRORS[@]} -gt 0 ]; then
|
||||||
|
echo -e "${RED}=== ERRORS (${#ERRORS[@]}) ===${NC}"
|
||||||
|
for error in "${ERRORS[@]}"; do
|
||||||
|
echo -e "${RED} ✗ $error${NC}"
|
||||||
|
done
|
||||||
|
EXIT_CODE=1
|
||||||
|
echo ""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Report warnings
|
||||||
|
if [ ${#WARNINGS[@]} -gt 0 ]; then
|
||||||
|
echo -e "${YELLOW}=== WARNINGS (${#WARNINGS[@]}) ===${NC}"
|
||||||
|
for warning in "${WARNINGS[@]}"; do
|
||||||
|
echo -e "${YELLOW} ⚠ $warning${NC}"
|
||||||
|
done
|
||||||
|
if [ "$STRICT_MODE" = true ]; then
|
||||||
|
EXIT_CODE=1
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Scanner fix suggestions
|
||||||
|
if [ "$FIX_SCANNER" = true ]; then
|
||||||
|
echo "=== Scanner Migration Rename Suggestions ==="
|
||||||
|
echo "# Run these commands to fix Scanner duplicate migrations:"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
SCANNER_DIR="$REPO_ROOT/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations"
|
||||||
|
if [ -d "$SCANNER_DIR" ]; then
|
||||||
|
# Map old names to new sequential numbers
|
||||||
|
cat << 'EOF'
|
||||||
|
# Before running: backup the schema_migrations table!
|
||||||
|
# After renaming: update schema_migrations.migration_name to match new names
|
||||||
|
|
||||||
|
cd src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations
|
||||||
|
|
||||||
|
# Fix duplicate 009 prefixes
|
||||||
|
git mv 009_call_graph_tables.sql 020_call_graph_tables.sql
|
||||||
|
git mv 009_smart_diff_tables_search_path.sql 021_smart_diff_tables_search_path.sql
|
||||||
|
|
||||||
|
# Fix duplicate 010 prefixes
|
||||||
|
git mv 010_reachability_drift_tables.sql 022_reachability_drift_tables.sql
|
||||||
|
git mv 010_scanner_api_ingestion.sql 023_scanner_api_ingestion.sql
|
||||||
|
git mv 010_smart_diff_priority_score_widen.sql 024_smart_diff_priority_score_widen.sql
|
||||||
|
|
||||||
|
# Fix duplicate 014 prefixes
|
||||||
|
git mv 014_epss_triage_columns.sql 025_epss_triage_columns.sql
|
||||||
|
git mv 014_vuln_surfaces.sql 026_vuln_surfaces.sql
|
||||||
|
|
||||||
|
# Renumber subsequent migrations
|
||||||
|
git mv 011_epss_raw_layer.sql 027_epss_raw_layer.sql
|
||||||
|
git mv 012_epss_signal_layer.sql 028_epss_signal_layer.sql
|
||||||
|
git mv 013_witness_storage.sql 029_witness_storage.sql
|
||||||
|
git mv 015_vuln_surface_triggers_update.sql 030_vuln_surface_triggers_update.sql
|
||||||
|
git mv 016_reach_cache.sql 031_reach_cache.sql
|
||||||
|
git mv 017_idempotency_keys.sql 032_idempotency_keys.sql
|
||||||
|
git mv 018_binary_evidence.sql 033_binary_evidence.sql
|
||||||
|
git mv 019_func_proof_tables.sql 034_func_proof_tables.sql
|
||||||
|
EOF
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
if [ $EXIT_CODE -eq 0 ]; then
|
||||||
|
echo -e "${GREEN}=== VALIDATION PASSED ===${NC}"
|
||||||
|
else
|
||||||
|
echo -e "${RED}=== VALIDATION FAILED ===${NC}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit $EXIT_CODE
|
||||||
244
.gitea/scripts/validate/validate-sbom.sh
Normal file
244
.gitea/scripts/validate/validate-sbom.sh
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# scripts/validate-sbom.sh
|
||||||
|
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
|
||||||
|
# Task: SCHEMA-8200-004 - Create validate-sbom.sh wrapper for sbom-utility
|
||||||
|
#
|
||||||
|
# Validates SBOM files against official CycloneDX JSON schemas.
|
||||||
|
# Uses sbom-utility for CycloneDX validation.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./scripts/validate-sbom.sh <sbom-file> [--schema <schema-path>]
|
||||||
|
# ./scripts/validate-sbom.sh src/__Tests/__Benchmarks/golden-corpus/sample.cyclonedx.json
|
||||||
|
# ./scripts/validate-sbom.sh --all # Validate all CycloneDX fixtures
|
||||||
|
#
|
||||||
|
# Exit codes:
|
||||||
|
# 0 - All validations passed
|
||||||
|
# 1 - Validation failed or error
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
|
||||||
|
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
|
||||||
|
DEFAULT_SCHEMA="${SCHEMA_DIR}/cyclonedx-bom-1.6.schema.json"
|
||||||
|
SBOM_UTILITY_VERSION="v0.16.0"
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
log_info() {
|
||||||
|
echo -e "${GREEN}[INFO]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_warn() {
|
||||||
|
echo -e "${YELLOW}[WARN]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_error() {
|
||||||
|
echo -e "${RED}[ERROR]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
check_sbom_utility() {
|
||||||
|
if ! command -v sbom-utility &> /dev/null; then
|
||||||
|
log_warn "sbom-utility not found in PATH"
|
||||||
|
log_info "Installing sbom-utility ${SBOM_UTILITY_VERSION}..."
|
||||||
|
|
||||||
|
# Detect OS and architecture
|
||||||
|
local os arch
|
||||||
|
case "$(uname -s)" in
|
||||||
|
Linux*) os="linux";;
|
||||||
|
Darwin*) os="darwin";;
|
||||||
|
MINGW*|MSYS*|CYGWIN*) os="windows";;
|
||||||
|
*) log_error "Unsupported OS: $(uname -s)"; exit 1;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
case "$(uname -m)" in
|
||||||
|
x86_64|amd64) arch="amd64";;
|
||||||
|
arm64|aarch64) arch="arm64";;
|
||||||
|
*) log_error "Unsupported architecture: $(uname -m)"; exit 1;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
local url="https://github.com/CycloneDX/sbom-utility/releases/download/${SBOM_UTILITY_VERSION}/sbom-utility-${SBOM_UTILITY_VERSION}-${os}-${arch}.tar.gz"
|
||||||
|
local temp_dir
|
||||||
|
temp_dir=$(mktemp -d)
|
||||||
|
|
||||||
|
log_info "Downloading from ${url}..."
|
||||||
|
curl -sSfL "${url}" | tar xz -C "${temp_dir}"
|
||||||
|
|
||||||
|
if [[ "$os" == "windows" ]]; then
|
||||||
|
log_info "Please add ${temp_dir}/sbom-utility.exe to your PATH"
|
||||||
|
export PATH="${temp_dir}:${PATH}"
|
||||||
|
else
|
||||||
|
log_info "Installing to /usr/local/bin (may require sudo)..."
|
||||||
|
if [[ -w /usr/local/bin ]]; then
|
||||||
|
mv "${temp_dir}/sbom-utility" /usr/local/bin/
|
||||||
|
else
|
||||||
|
sudo mv "${temp_dir}/sbom-utility" /usr/local/bin/
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
rm -rf "${temp_dir}"
|
||||||
|
log_info "sbom-utility installed successfully"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_cyclonedx() {
|
||||||
|
local sbom_file="$1"
|
||||||
|
local schema="${2:-$DEFAULT_SCHEMA}"
|
||||||
|
|
||||||
|
if [[ ! -f "$sbom_file" ]]; then
|
||||||
|
log_error "File not found: $sbom_file"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -f "$schema" ]]; then
|
||||||
|
log_error "Schema not found: $schema"
|
||||||
|
log_info "Expected schema at: ${DEFAULT_SCHEMA}"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Detect if it's a CycloneDX file
|
||||||
|
if ! grep -q '"bomFormat"' "$sbom_file" 2>/dev/null; then
|
||||||
|
log_warn "File does not appear to be CycloneDX: $sbom_file"
|
||||||
|
log_info "Skipping (use validate-spdx.sh for SPDX files)"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Validating: $sbom_file"
|
||||||
|
|
||||||
|
# Run sbom-utility validation
|
||||||
|
if sbom-utility validate --input-file "$sbom_file" --format json 2>&1; then
|
||||||
|
log_info "✓ Validation passed: $sbom_file"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
log_error "✗ Validation failed: $sbom_file"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_all() {
|
||||||
|
local fixture_dir="${REPO_ROOT}/src/__Tests/__Benchmarks/golden-corpus"
|
||||||
|
local failed=0
|
||||||
|
local passed=0
|
||||||
|
local skipped=0
|
||||||
|
|
||||||
|
log_info "Validating all CycloneDX fixtures in ${fixture_dir}..."
|
||||||
|
|
||||||
|
if [[ ! -d "$fixture_dir" ]]; then
|
||||||
|
log_error "Fixture directory not found: $fixture_dir"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
while IFS= read -r -d '' file; do
|
||||||
|
if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
|
||||||
|
if validate_cyclonedx "$file"; then
|
||||||
|
((passed++))
|
||||||
|
else
|
||||||
|
((failed++))
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_info "Skipping non-CycloneDX file: $file"
|
||||||
|
((skipped++))
|
||||||
|
fi
|
||||||
|
done < <(find "$fixture_dir" -type f -name '*.json' -print0)
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
log_info "Validation Summary:"
|
||||||
|
log_info " Passed: ${passed}"
|
||||||
|
log_info " Failed: ${failed}"
|
||||||
|
log_info " Skipped: ${skipped}"
|
||||||
|
|
||||||
|
if [[ $failed -gt 0 ]]; then
|
||||||
|
log_error "Some validations failed!"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "All CycloneDX validations passed!"
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat << EOF
|
||||||
|
Usage: $(basename "$0") [OPTIONS] <sbom-file>
|
||||||
|
|
||||||
|
Validates CycloneDX SBOM files against official JSON schemas.
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--all Validate all CycloneDX fixtures in src/__Tests/__Benchmarks/golden-corpus/
|
||||||
|
--schema <path> Use custom schema file (default: docs/schemas/cyclonedx-bom-1.6.schema.json)
|
||||||
|
--help, -h Show this help message
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
$(basename "$0") sample.cyclonedx.json
|
||||||
|
$(basename "$0") --schema custom-schema.json sample.json
|
||||||
|
$(basename "$0") --all
|
||||||
|
|
||||||
|
Exit codes:
|
||||||
|
0 All validations passed
|
||||||
|
1 Validation failed or error
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
main() {
|
||||||
|
local schema="$DEFAULT_SCHEMA"
|
||||||
|
local validate_all_flag=false
|
||||||
|
local files=()
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--all)
|
||||||
|
validate_all_flag=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--schema)
|
||||||
|
schema="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--help|-h)
|
||||||
|
usage
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
-*)
|
||||||
|
log_error "Unknown option: $1"
|
||||||
|
usage
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
files+=("$1")
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Ensure sbom-utility is available
|
||||||
|
check_sbom_utility
|
||||||
|
|
||||||
|
if [[ "$validate_all_flag" == "true" ]]; then
|
||||||
|
validate_all
|
||||||
|
exit $?
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ${#files[@]} -eq 0 ]]; then
|
||||||
|
log_error "No SBOM file specified"
|
||||||
|
usage
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
local failed=0
|
||||||
|
for file in "${files[@]}"; do
|
||||||
|
if ! validate_cyclonedx "$file" "$schema"; then
|
||||||
|
((failed++))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ $failed -gt 0 ]]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
main "$@"
|
||||||
277
.gitea/scripts/validate/validate-spdx.sh
Normal file
277
.gitea/scripts/validate/validate-spdx.sh
Normal file
@@ -0,0 +1,277 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# scripts/validate-spdx.sh
|
||||||
|
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
|
||||||
|
# Task: SCHEMA-8200-005 - Create validate-spdx.sh wrapper for SPDX validation
|
||||||
|
#
|
||||||
|
# Validates SPDX files against SPDX 3.0.1 JSON schema.
|
||||||
|
# Uses pyspdxtools (spdx-tools) for SPDX validation.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./scripts/validate-spdx.sh <spdx-file>
|
||||||
|
# ./scripts/validate-spdx.sh bench/golden-corpus/sample.spdx.json
|
||||||
|
# ./scripts/validate-spdx.sh --all # Validate all SPDX fixtures
|
||||||
|
#
|
||||||
|
# Exit codes:
|
||||||
|
# 0 - All validations passed
|
||||||
|
# 1 - Validation failed or error
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
|
||||||
|
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
|
||||||
|
DEFAULT_SCHEMA="${SCHEMA_DIR}/spdx-jsonld-3.0.1.schema.json"
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
log_info() {
|
||||||
|
echo -e "${GREEN}[INFO]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_warn() {
|
||||||
|
echo -e "${YELLOW}[WARN]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_error() {
|
||||||
|
echo -e "${RED}[ERROR]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
check_spdx_tools() {
|
||||||
|
if ! command -v pyspdxtools &> /dev/null; then
|
||||||
|
log_warn "pyspdxtools not found in PATH"
|
||||||
|
log_info "Installing spdx-tools via pip..."
|
||||||
|
|
||||||
|
if command -v pip3 &> /dev/null; then
|
||||||
|
pip3 install --user spdx-tools
|
||||||
|
elif command -v pip &> /dev/null; then
|
||||||
|
pip install --user spdx-tools
|
||||||
|
else
|
||||||
|
log_error "pip not found. Please install Python and pip first."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "spdx-tools installed successfully"
|
||||||
|
|
||||||
|
# Refresh PATH for newly installed tools
|
||||||
|
if [[ -d "${HOME}/.local/bin" ]]; then
|
||||||
|
export PATH="${HOME}/.local/bin:${PATH}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
check_ajv() {
|
||||||
|
if ! command -v ajv &> /dev/null; then
|
||||||
|
log_warn "ajv-cli not found in PATH"
|
||||||
|
log_info "Installing ajv-cli via npm..."
|
||||||
|
|
||||||
|
if command -v npm &> /dev/null; then
|
||||||
|
npm install -g ajv-cli ajv-formats
|
||||||
|
else
|
||||||
|
log_warn "npm not found. JSON schema validation will be skipped."
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "ajv-cli installed successfully"
|
||||||
|
fi
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_spdx_schema() {
|
||||||
|
local spdx_file="$1"
|
||||||
|
local schema="$2"
|
||||||
|
|
||||||
|
if check_ajv; then
|
||||||
|
log_info "Validating against JSON schema: $schema"
|
||||||
|
if ajv validate -s "$schema" -d "$spdx_file" --spec=draft2020 2>&1; then
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_warn "Skipping JSON schema validation (ajv not available)"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_spdx() {
|
||||||
|
local spdx_file="$1"
|
||||||
|
local schema="${2:-$DEFAULT_SCHEMA}"
|
||||||
|
|
||||||
|
if [[ ! -f "$spdx_file" ]]; then
|
||||||
|
log_error "File not found: $spdx_file"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Detect if it's an SPDX file (JSON-LD format)
|
||||||
|
if ! grep -qE '"@context"|"spdxId"|"spdxVersion"' "$spdx_file" 2>/dev/null; then
|
||||||
|
log_warn "File does not appear to be SPDX: $spdx_file"
|
||||||
|
log_info "Skipping (use validate-sbom.sh for CycloneDX files)"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Validating: $spdx_file"
|
||||||
|
|
||||||
|
local validation_passed=true
|
||||||
|
|
||||||
|
# Try pyspdxtools validation first (semantic validation)
|
||||||
|
if command -v pyspdxtools &> /dev/null; then
|
||||||
|
log_info "Running SPDX semantic validation..."
|
||||||
|
if pyspdxtools validate "$spdx_file" 2>&1; then
|
||||||
|
log_info "✓ SPDX semantic validation passed"
|
||||||
|
else
|
||||||
|
# pyspdxtools may not support SPDX 3.0 yet
|
||||||
|
log_warn "pyspdxtools validation failed or not supported for this format"
|
||||||
|
log_info "Falling back to JSON schema validation only"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# JSON schema validation (syntax validation)
|
||||||
|
if [[ -f "$schema" ]]; then
|
||||||
|
if validate_spdx_schema "$spdx_file" "$schema"; then
|
||||||
|
log_info "✓ JSON schema validation passed"
|
||||||
|
else
|
||||||
|
log_error "✗ JSON schema validation failed"
|
||||||
|
validation_passed=false
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_warn "Schema file not found: $schema"
|
||||||
|
log_info "Skipping schema validation"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$validation_passed" == "true" ]]; then
|
||||||
|
log_info "✓ Validation passed: $spdx_file"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
log_error "✗ Validation failed: $spdx_file"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_all() {
|
||||||
|
local fixture_dir="${REPO_ROOT}/bench/golden-corpus"
|
||||||
|
local failed=0
|
||||||
|
local passed=0
|
||||||
|
local skipped=0
|
||||||
|
|
||||||
|
log_info "Validating all SPDX fixtures in ${fixture_dir}..."
|
||||||
|
|
||||||
|
if [[ ! -d "$fixture_dir" ]]; then
|
||||||
|
log_error "Fixture directory not found: $fixture_dir"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
while IFS= read -r -d '' file; do
|
||||||
|
# Check if it's an SPDX file
|
||||||
|
if grep -qE '"@context"|"spdxVersion"' "$file" 2>/dev/null; then
|
||||||
|
if validate_spdx "$file"; then
|
||||||
|
((passed++))
|
||||||
|
else
|
||||||
|
((failed++))
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_info "Skipping non-SPDX file: $file"
|
||||||
|
((skipped++))
|
||||||
|
fi
|
||||||
|
done < <(find "$fixture_dir" -type f \( -name '*spdx*.json' -o -name '*.spdx.json' \) -print0)
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
log_info "Validation Summary:"
|
||||||
|
log_info " Passed: ${passed}"
|
||||||
|
log_info " Failed: ${failed}"
|
||||||
|
log_info " Skipped: ${skipped}"
|
||||||
|
|
||||||
|
if [[ $failed -gt 0 ]]; then
|
||||||
|
log_error "Some validations failed!"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "All SPDX validations passed!"
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat << EOF
|
||||||
|
Usage: $(basename "$0") [OPTIONS] <spdx-file>
|
||||||
|
|
||||||
|
Validates SPDX files against SPDX 3.0.1 JSON schema.
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--all Validate all SPDX fixtures in bench/golden-corpus/
|
||||||
|
--schema <path> Use custom schema file (default: docs/schemas/spdx-jsonld-3.0.1.schema.json)
|
||||||
|
--help, -h Show this help message
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
$(basename "$0") sample.spdx.json
|
||||||
|
$(basename "$0") --schema custom-schema.json sample.json
|
||||||
|
$(basename "$0") --all
|
||||||
|
|
||||||
|
Exit codes:
|
||||||
|
0 All validations passed
|
||||||
|
1 Validation failed or error
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
main() {
|
||||||
|
local schema="$DEFAULT_SCHEMA"
|
||||||
|
local validate_all_flag=false
|
||||||
|
local files=()
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--all)
|
||||||
|
validate_all_flag=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--schema)
|
||||||
|
schema="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--help|-h)
|
||||||
|
usage
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
-*)
|
||||||
|
log_error "Unknown option: $1"
|
||||||
|
usage
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
files+=("$1")
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Ensure tools are available
|
||||||
|
check_spdx_tools || true # Continue even if pyspdxtools install fails
|
||||||
|
|
||||||
|
if [[ "$validate_all_flag" == "true" ]]; then
|
||||||
|
validate_all
|
||||||
|
exit $?
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ${#files[@]} -eq 0 ]]; then
|
||||||
|
log_error "No SPDX file specified"
|
||||||
|
usage
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
local failed=0
|
||||||
|
for file in "${files[@]}"; do
|
||||||
|
if ! validate_spdx "$file" "$schema"; then
|
||||||
|
((failed++))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ $failed -gt 0 ]]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
main "$@"
|
||||||
261
.gitea/scripts/validate/validate-vex.sh
Normal file
261
.gitea/scripts/validate/validate-vex.sh
Normal file
@@ -0,0 +1,261 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# scripts/validate-vex.sh
|
||||||
|
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
|
||||||
|
# Task: SCHEMA-8200-006 - Create validate-vex.sh wrapper for OpenVEX validation
|
||||||
|
#
|
||||||
|
# Validates OpenVEX files against the OpenVEX 0.2.0 JSON schema.
|
||||||
|
# Uses ajv-cli for JSON schema validation.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./scripts/validate-vex.sh <vex-file>
|
||||||
|
# ./scripts/validate-vex.sh bench/golden-corpus/sample.vex.json
|
||||||
|
# ./scripts/validate-vex.sh --all # Validate all VEX fixtures
|
||||||
|
#
|
||||||
|
# Exit codes:
|
||||||
|
# 0 - All validations passed
|
||||||
|
# 1 - Validation failed or error
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
|
||||||
|
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
|
||||||
|
DEFAULT_SCHEMA="${SCHEMA_DIR}/openvex-0.2.0.schema.json"
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
log_info() {
|
||||||
|
echo -e "${GREEN}[INFO]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_warn() {
|
||||||
|
echo -e "${YELLOW}[WARN]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_error() {
|
||||||
|
echo -e "${RED}[ERROR]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
check_ajv() {
|
||||||
|
if ! command -v ajv &> /dev/null; then
|
||||||
|
log_warn "ajv-cli not found in PATH"
|
||||||
|
log_info "Installing ajv-cli via npm..."
|
||||||
|
|
||||||
|
if command -v npm &> /dev/null; then
|
||||||
|
npm install -g ajv-cli ajv-formats
|
||||||
|
elif command -v npx &> /dev/null; then
|
||||||
|
log_info "Using npx for ajv (no global install)"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
log_error "npm/npx not found. Please install Node.js first."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "ajv-cli installed successfully"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
run_ajv() {
|
||||||
|
local schema="$1"
|
||||||
|
local data="$2"
|
||||||
|
|
||||||
|
if command -v ajv &> /dev/null; then
|
||||||
|
ajv validate -s "$schema" -d "$data" --spec=draft2020 2>&1
|
||||||
|
elif command -v npx &> /dev/null; then
|
||||||
|
npx ajv-cli validate -s "$schema" -d "$data" --spec=draft2020 2>&1
|
||||||
|
else
|
||||||
|
log_error "No ajv available"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_openvex() {
|
||||||
|
local vex_file="$1"
|
||||||
|
local schema="${2:-$DEFAULT_SCHEMA}"
|
||||||
|
|
||||||
|
if [[ ! -f "$vex_file" ]]; then
|
||||||
|
log_error "File not found: $vex_file"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -f "$schema" ]]; then
|
||||||
|
log_error "Schema not found: $schema"
|
||||||
|
log_info "Expected schema at: ${DEFAULT_SCHEMA}"
|
||||||
|
log_info "Download from: https://raw.githubusercontent.com/openvex/spec/main/openvex_json_schema.json"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Detect if it's an OpenVEX file
|
||||||
|
if ! grep -qE '"@context".*"https://openvex.dev/ns"|"openvex"' "$vex_file" 2>/dev/null; then
|
||||||
|
log_warn "File does not appear to be OpenVEX: $vex_file"
|
||||||
|
log_info "Skipping (use validate-sbom.sh for CycloneDX files)"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Validating: $vex_file"
|
||||||
|
|
||||||
|
# Run ajv validation
|
||||||
|
if run_ajv "$schema" "$vex_file"; then
|
||||||
|
log_info "✓ Validation passed: $vex_file"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
log_error "✗ Validation failed: $vex_file"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_all() {
|
||||||
|
local failed=0
|
||||||
|
local passed=0
|
||||||
|
local skipped=0
|
||||||
|
|
||||||
|
# Search multiple directories for VEX files
|
||||||
|
local search_dirs=(
|
||||||
|
"${REPO_ROOT}/bench/golden-corpus"
|
||||||
|
"${REPO_ROOT}/bench/vex-lattice"
|
||||||
|
"${REPO_ROOT}/datasets"
|
||||||
|
)
|
||||||
|
|
||||||
|
log_info "Validating all OpenVEX fixtures..."
|
||||||
|
|
||||||
|
for fixture_dir in "${search_dirs[@]}"; do
|
||||||
|
if [[ ! -d "$fixture_dir" ]]; then
|
||||||
|
log_warn "Directory not found, skipping: $fixture_dir"
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Searching in: $fixture_dir"
|
||||||
|
|
||||||
|
while IFS= read -r -d '' file; do
|
||||||
|
# Check if it's an OpenVEX file
|
||||||
|
if grep -qE '"@context".*"https://openvex.dev/ns"|"openvex"' "$file" 2>/dev/null; then
|
||||||
|
if validate_openvex "$file"; then
|
||||||
|
((passed++))
|
||||||
|
else
|
||||||
|
((failed++))
|
||||||
|
fi
|
||||||
|
elif grep -q '"vex"' "$file" 2>/dev/null || [[ "$file" == *vex* ]]; then
|
||||||
|
# Might be VEX-related but not OpenVEX format
|
||||||
|
log_info "Checking potential VEX file: $file"
|
||||||
|
if grep -qE '"@context"' "$file" 2>/dev/null; then
|
||||||
|
if validate_openvex "$file"; then
|
||||||
|
((passed++))
|
||||||
|
else
|
||||||
|
((failed++))
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_info "Skipping non-OpenVEX file: $file"
|
||||||
|
((skipped++))
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
((skipped++))
|
||||||
|
fi
|
||||||
|
done < <(find "$fixture_dir" -type f \( -name '*vex*.json' -o -name '*.vex.json' -o -name '*openvex*.json' \) -print0 2>/dev/null || true)
|
||||||
|
done
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
log_info "Validation Summary:"
|
||||||
|
log_info " Passed: ${passed}"
|
||||||
|
log_info " Failed: ${failed}"
|
||||||
|
log_info " Skipped: ${skipped}"
|
||||||
|
|
||||||
|
if [[ $failed -gt 0 ]]; then
|
||||||
|
log_error "Some validations failed!"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ $passed -eq 0 ]] && [[ $skipped -eq 0 ]]; then
|
||||||
|
log_warn "No OpenVEX files found to validate"
|
||||||
|
else
|
||||||
|
log_info "All OpenVEX validations passed!"
|
||||||
|
fi
|
||||||
|
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat << EOF
|
||||||
|
Usage: $(basename "$0") [OPTIONS] <vex-file>
|
||||||
|
|
||||||
|
Validates OpenVEX files against the OpenVEX 0.2.0 JSON schema.
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--all Validate all OpenVEX fixtures in bench/ and datasets/
|
||||||
|
--schema <path> Use custom schema file (default: docs/schemas/openvex-0.2.0.schema.json)
|
||||||
|
--help, -h Show this help message
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
$(basename "$0") sample.vex.json
|
||||||
|
$(basename "$0") --schema custom-schema.json sample.json
|
||||||
|
$(basename "$0") --all
|
||||||
|
|
||||||
|
Exit codes:
|
||||||
|
0 All validations passed
|
||||||
|
1 Validation failed or error
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
main() {
|
||||||
|
local schema="$DEFAULT_SCHEMA"
|
||||||
|
local validate_all_flag=false
|
||||||
|
local files=()
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--all)
|
||||||
|
validate_all_flag=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--schema)
|
||||||
|
schema="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--help|-h)
|
||||||
|
usage
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
-*)
|
||||||
|
log_error "Unknown option: $1"
|
||||||
|
usage
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
files+=("$1")
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Ensure ajv is available
|
||||||
|
check_ajv
|
||||||
|
|
||||||
|
if [[ "$validate_all_flag" == "true" ]]; then
|
||||||
|
validate_all
|
||||||
|
exit $?
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ${#files[@]} -eq 0 ]]; then
|
||||||
|
log_error "No VEX file specified"
|
||||||
|
usage
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
local failed=0
|
||||||
|
for file in "${files[@]}"; do
|
||||||
|
if ! validate_openvex "$file" "$schema"; then
|
||||||
|
((failed++))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ $failed -gt 0 ]]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
main "$@"
|
||||||
224
.gitea/scripts/validate/validate-workflows.sh
Normal file
224
.gitea/scripts/validate/validate-workflows.sh
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# validate-workflows.sh - Validate Gitea Actions workflows
|
||||||
|
# Sprint: SPRINT_20251226_001_CICD
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./validate-workflows.sh # Validate all workflows
|
||||||
|
# ./validate-workflows.sh --strict # Fail on any warning
|
||||||
|
# ./validate-workflows.sh --verbose # Show detailed output
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
|
||||||
|
WORKFLOWS_DIR="$REPO_ROOT/.gitea/workflows"
|
||||||
|
SCRIPTS_DIR="$REPO_ROOT/.gitea/scripts"
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
STRICT_MODE=false
|
||||||
|
VERBOSE=false
|
||||||
|
|
||||||
|
# Counters
|
||||||
|
PASSED=0
|
||||||
|
FAILED=0
|
||||||
|
WARNINGS=0
|
||||||
|
|
||||||
|
# Colors (if terminal supports it)
|
||||||
|
if [[ -t 1 ]]; then
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[0;33m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
else
|
||||||
|
RED=''
|
||||||
|
GREEN=''
|
||||||
|
YELLOW=''
|
||||||
|
NC=''
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
--strict)
|
||||||
|
STRICT_MODE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--verbose)
|
||||||
|
VERBOSE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--help)
|
||||||
|
echo "Usage: $0 [OPTIONS]"
|
||||||
|
echo ""
|
||||||
|
echo "Options:"
|
||||||
|
echo " --strict Fail on any warning"
|
||||||
|
echo " --verbose Show detailed output"
|
||||||
|
echo " --help Show this help message"
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown option: $1"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "=== Gitea Workflow Validation ==="
|
||||||
|
echo "Workflows: $WORKFLOWS_DIR"
|
||||||
|
echo "Scripts: $SCRIPTS_DIR"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Check if workflows directory exists
|
||||||
|
if [[ ! -d "$WORKFLOWS_DIR" ]]; then
|
||||||
|
echo -e "${RED}ERROR: Workflows directory not found${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Function to validate YAML syntax
|
||||||
|
validate_yaml_syntax() {
|
||||||
|
local file=$1
|
||||||
|
local name=$(basename "$file")
|
||||||
|
|
||||||
|
# Try python yaml parser first
|
||||||
|
if command -v python3 &>/dev/null; then
|
||||||
|
if python3 -c "import yaml; yaml.safe_load(open('$file'))" 2>/dev/null; then
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
# Fallback to ruby if available
|
||||||
|
elif command -v ruby &>/dev/null; then
|
||||||
|
if ruby -ryaml -e "YAML.load_file('$file')" 2>/dev/null; then
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# Can't validate YAML, warn and skip
|
||||||
|
return 2
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to extract script references from a workflow
|
||||||
|
extract_script_refs() {
|
||||||
|
local file=$1
|
||||||
|
# Look for patterns like: .gitea/scripts/*, scripts/*, ./devops/scripts/*
|
||||||
|
grep -oE '(\.gitea/scripts|scripts|devops/scripts)/[a-zA-Z0-9_/-]+\.(sh|py|js|mjs)' "$file" 2>/dev/null | sort -u || true
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to check if a script exists
|
||||||
|
check_script_exists() {
|
||||||
|
local script_path=$1
|
||||||
|
local full_path="$REPO_ROOT/$script_path"
|
||||||
|
|
||||||
|
if [[ -f "$full_path" ]]; then
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Validate each workflow file
|
||||||
|
echo "=== Validating Workflow Syntax ==="
|
||||||
|
for workflow in "$WORKFLOWS_DIR"/*.yml "$WORKFLOWS_DIR"/*.yaml; do
|
||||||
|
[[ -e "$workflow" ]] || continue
|
||||||
|
|
||||||
|
name=$(basename "$workflow")
|
||||||
|
|
||||||
|
if [[ "$VERBOSE" == "true" ]]; then
|
||||||
|
echo "Checking: $name"
|
||||||
|
fi
|
||||||
|
|
||||||
|
result=$(validate_yaml_syntax "$workflow")
|
||||||
|
exit_code=$?
|
||||||
|
|
||||||
|
if [[ $exit_code -eq 0 ]]; then
|
||||||
|
echo -e " ${GREEN}[PASS]${NC} $name - YAML syntax valid"
|
||||||
|
((PASSED++))
|
||||||
|
elif [[ $exit_code -eq 2 ]]; then
|
||||||
|
echo -e " ${YELLOW}[SKIP]${NC} $name - No YAML parser available"
|
||||||
|
((WARNINGS++))
|
||||||
|
else
|
||||||
|
echo -e " ${RED}[FAIL]${NC} $name - YAML syntax error"
|
||||||
|
((FAILED++))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Validating Script References ==="
|
||||||
|
|
||||||
|
# Check all script references
|
||||||
|
MISSING_SCRIPTS=()
|
||||||
|
for workflow in "$WORKFLOWS_DIR"/*.yml "$WORKFLOWS_DIR"/*.yaml; do
|
||||||
|
[[ -e "$workflow" ]] || continue
|
||||||
|
|
||||||
|
name=$(basename "$workflow")
|
||||||
|
refs=$(extract_script_refs "$workflow")
|
||||||
|
|
||||||
|
if [[ -z "$refs" ]]; then
|
||||||
|
if [[ "$VERBOSE" == "true" ]]; then
|
||||||
|
echo " $name: No script references found"
|
||||||
|
fi
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
while IFS= read -r script_ref; do
|
||||||
|
[[ -z "$script_ref" ]] && continue
|
||||||
|
|
||||||
|
if check_script_exists "$script_ref"; then
|
||||||
|
if [[ "$VERBOSE" == "true" ]]; then
|
||||||
|
echo -e " ${GREEN}[OK]${NC} $name -> $script_ref"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo -e " ${RED}[MISSING]${NC} $name -> $script_ref"
|
||||||
|
MISSING_SCRIPTS+=("$name: $script_ref")
|
||||||
|
((WARNINGS++))
|
||||||
|
fi
|
||||||
|
done <<< "$refs"
|
||||||
|
done
|
||||||
|
|
||||||
|
# Check that .gitea/scripts directories exist
|
||||||
|
echo ""
|
||||||
|
echo "=== Validating Script Directory Structure ==="
|
||||||
|
EXPECTED_DIRS=(build test validate sign release metrics evidence util)
|
||||||
|
for dir in "${EXPECTED_DIRS[@]}"; do
|
||||||
|
dir_path="$SCRIPTS_DIR/$dir"
|
||||||
|
if [[ -d "$dir_path" ]]; then
|
||||||
|
script_count=$(find "$dir_path" -maxdepth 1 -name "*.sh" -o -name "*.py" 2>/dev/null | wc -l)
|
||||||
|
echo -e " ${GREEN}[OK]${NC} $dir/ ($script_count scripts)"
|
||||||
|
else
|
||||||
|
echo -e " ${YELLOW}[WARN]${NC} $dir/ - Directory not found"
|
||||||
|
((WARNINGS++))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
echo ""
|
||||||
|
echo "=== Validation Summary ==="
|
||||||
|
echo -e " Passed: ${GREEN}$PASSED${NC}"
|
||||||
|
echo -e " Failed: ${RED}$FAILED${NC}"
|
||||||
|
echo -e " Warnings: ${YELLOW}$WARNINGS${NC}"
|
||||||
|
|
||||||
|
if [[ ${#MISSING_SCRIPTS[@]} -gt 0 ]]; then
|
||||||
|
echo ""
|
||||||
|
echo "Missing script references:"
|
||||||
|
for ref in "${MISSING_SCRIPTS[@]}"; do
|
||||||
|
echo " - $ref"
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Exit code
|
||||||
|
if [[ $FAILED -gt 0 ]]; then
|
||||||
|
echo ""
|
||||||
|
echo -e "${RED}FAILED: $FAILED validation(s) failed${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$STRICT_MODE" == "true" && $WARNINGS -gt 0 ]]; then
|
||||||
|
echo ""
|
||||||
|
echo -e "${YELLOW}STRICT MODE: $WARNINGS warning(s) treated as errors${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo -e "${GREEN}All validations passed!${NC}"
|
||||||
25
.gitea/scripts/validate/verify-binaries.sh
Normal file
25
.gitea/scripts/validate/verify-binaries.sh
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Verifies binary artefacts live only in approved locations.
|
||||||
|
# Allowed roots: .nuget/packages (curated feed + cache), vendor (pinned binaries),
|
||||||
|
# offline (air-gap bundles/templates), plugins/tools/deploy/ops (module-owned binaries).
|
||||||
|
|
||||||
|
repo_root="$(git rev-parse --show-toplevel)"
|
||||||
|
cd "$repo_root"
|
||||||
|
|
||||||
|
# Extensions considered binary artefacts.
|
||||||
|
binary_ext="(nupkg|dll|exe|so|dylib|a|lib|tar|tar.gz|tgz|zip|jar|deb|rpm|bin)"
|
||||||
|
# Locations allowed to contain binaries.
|
||||||
|
allowed_prefix="^(.nuget/packages|.nuget/packages/packages|vendor|offline|plugins|tools|deploy|ops|third_party|docs/artifacts|samples|src/.*/Fixtures|src/.*/fixtures)/"
|
||||||
|
|
||||||
|
# Only consider files that currently exist in the working tree (skip deleted placeholders).
|
||||||
|
violations=$(git ls-files | while read -r f; do [[ -f "$f" ]] && echo "$f"; done | grep -E "\\.${binary_ext}$" | grep -Ev "$allowed_prefix" || true)
|
||||||
|
|
||||||
|
if [[ -n "$violations" ]]; then
|
||||||
|
echo "Binary artefacts found outside approved directories:" >&2
|
||||||
|
echo "$violations" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf "Binary layout OK (allowed roots: %s)\n" "$allowed_prefix"
|
||||||
70
.gitea/workflows/advisory-ai-release.yml
Normal file
70
.gitea/workflows/advisory-ai-release.yml
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
name: Advisory AI Feed Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
allow_dev_key:
|
||||||
|
description: 'Allow dev key for testing (1=yes)'
|
||||||
|
required: false
|
||||||
|
default: '0'
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/AdvisoryAI/feeds/**'
|
||||||
|
- 'docs/samples/advisory-feeds/**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
package-feeds:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: 'v2.6.0'
|
||||||
|
|
||||||
|
- name: Fallback to dev key when secret is absent
|
||||||
|
run: |
|
||||||
|
if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
|
||||||
|
echo "[warn] COSIGN_PRIVATE_KEY_B64 not set; using dev key for non-production"
|
||||||
|
echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
|
||||||
|
echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
# Manual override
|
||||||
|
if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
|
||||||
|
echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
|
||||||
|
echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Package advisory feeds
|
||||||
|
run: |
|
||||||
|
chmod +x ops/deployment/advisory-ai/package-advisory-feeds.sh
|
||||||
|
ops/deployment/advisory-ai/package-advisory-feeds.sh
|
||||||
|
|
||||||
|
- name: Generate SBOM
|
||||||
|
run: |
|
||||||
|
# Install syft
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0
|
||||||
|
|
||||||
|
# Generate SBOM for feed bundle
|
||||||
|
syft dir:out/advisory-ai/feeds/stage \
|
||||||
|
-o spdx-json=out/advisory-ai/feeds/advisory-feeds.sbom.json \
|
||||||
|
--name advisory-feeds
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: advisory-feeds-${{ github.run_number }}
|
||||||
|
path: |
|
||||||
|
out/advisory-ai/feeds/advisory-feeds.tar.gz
|
||||||
|
out/advisory-ai/feeds/advisory-feeds.manifest.json
|
||||||
|
out/advisory-ai/feeds/advisory-feeds.manifest.dsse.json
|
||||||
|
out/advisory-ai/feeds/advisory-feeds.sbom.json
|
||||||
|
out/advisory-ai/feeds/provenance.json
|
||||||
|
if-no-files-found: warn
|
||||||
|
retention-days: 30
|
||||||
28
.gitea/workflows/airgap-sealed-ci.yml
Normal file
28
.gitea/workflows/airgap-sealed-ci.yml
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
name: Airgap Sealed CI Smoke
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'devops/airgap/**'
|
||||||
|
- '.gitea/workflows/airgap-sealed-ci.yml'
|
||||||
|
pull_request:
|
||||||
|
branches: [ main, develop ]
|
||||||
|
paths:
|
||||||
|
- 'devops/airgap/**'
|
||||||
|
- '.gitea/workflows/airgap-sealed-ci.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
sealed-smoke:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
- name: Install dnslib
|
||||||
|
run: pip install dnslib
|
||||||
|
- name: Run sealed-mode smoke
|
||||||
|
run: sudo devops/airgap/sealed-ci-smoke.sh
|
||||||
83
.gitea/workflows/aoc-backfill-release.yml
Normal file
83
.gitea/workflows/aoc-backfill-release.yml
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
name: AOC Backfill Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
dataset_hash:
|
||||||
|
description: 'Dataset hash from dev rehearsal (leave empty for dev mode)'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
allow_dev_key:
|
||||||
|
description: 'Allow dev key for testing (1=yes)'
|
||||||
|
required: false
|
||||||
|
default: '0'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
package-backfill:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Setup cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: 'v2.6.0'
|
||||||
|
|
||||||
|
- name: Restore AOC CLI
|
||||||
|
run: dotnet restore src/Aoc/StellaOps.Aoc.Cli/StellaOps.Aoc.Cli.csproj
|
||||||
|
|
||||||
|
- name: Configure signing
|
||||||
|
run: |
|
||||||
|
if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
|
||||||
|
echo "[info] No production key; using dev key"
|
||||||
|
echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
|
||||||
|
echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
|
||||||
|
echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
|
||||||
|
echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Package AOC backfill release
|
||||||
|
run: |
|
||||||
|
chmod +x devops/aoc/package-backfill-release.sh
|
||||||
|
DATASET_HASH="${{ github.event.inputs.dataset_hash }}" \
|
||||||
|
devops/aoc/package-backfill-release.sh
|
||||||
|
env:
|
||||||
|
DATASET_HASH: ${{ github.event.inputs.dataset_hash }}
|
||||||
|
|
||||||
|
- name: Generate SBOM with syft
|
||||||
|
run: |
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0
|
||||||
|
syft dir:out/aoc/cli \
|
||||||
|
-o spdx-json=out/aoc/aoc-backfill-runner.sbom.json \
|
||||||
|
--name aoc-backfill-runner || true
|
||||||
|
|
||||||
|
- name: Verify checksums
|
||||||
|
run: |
|
||||||
|
cd out/aoc
|
||||||
|
sha256sum -c SHA256SUMS
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: aoc-backfill-release-${{ github.run_number }}
|
||||||
|
path: |
|
||||||
|
out/aoc/aoc-backfill-runner.tar.gz
|
||||||
|
out/aoc/aoc-backfill-runner.manifest.json
|
||||||
|
out/aoc/aoc-backfill-runner.sbom.json
|
||||||
|
out/aoc/aoc-backfill-runner.provenance.json
|
||||||
|
out/aoc/aoc-backfill-runner.dsse.json
|
||||||
|
out/aoc/SHA256SUMS
|
||||||
|
if-no-files-found: warn
|
||||||
|
retention-days: 30
|
||||||
170
.gitea/workflows/aoc-guard.yml
Normal file
170
.gitea/workflows/aoc-guard.yml
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
name: AOC Guard CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'src/Aoc/**'
|
||||||
|
- 'src/Concelier/**'
|
||||||
|
- 'src/Authority/**'
|
||||||
|
- 'src/Excititor/**'
|
||||||
|
- 'devops/aoc/**'
|
||||||
|
- '.gitea/workflows/aoc-guard.yml'
|
||||||
|
pull_request:
|
||||||
|
branches: [ main, develop ]
|
||||||
|
paths:
|
||||||
|
- 'src/Aoc/**'
|
||||||
|
- 'src/Concelier/**'
|
||||||
|
- 'src/Authority/**'
|
||||||
|
- 'src/Excititor/**'
|
||||||
|
- 'devops/aoc/**'
|
||||||
|
- '.gitea/workflows/aoc-guard.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
aoc-guard:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||||
|
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||||
|
|
||||||
|
- name: Set up .NET SDK
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore analyzers
|
||||||
|
run: dotnet restore src/Aoc/__Analyzers/StellaOps.Aoc.Analyzers/StellaOps.Aoc.Analyzers.csproj
|
||||||
|
|
||||||
|
- name: Build analyzers
|
||||||
|
run: dotnet build src/Aoc/__Analyzers/StellaOps.Aoc.Analyzers/StellaOps.Aoc.Analyzers.csproj -c Release
|
||||||
|
|
||||||
|
- name: Run analyzers against ingestion projects
|
||||||
|
run: |
|
||||||
|
dotnet build src/Concelier/StellaOps.Concelier.Ingestion/StellaOps.Concelier.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true
|
||||||
|
dotnet build src/Authority/StellaOps.Authority.Ingestion/StellaOps.Authority.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true
|
||||||
|
dotnet build src/Excititor/StellaOps.Excititor.Ingestion/StellaOps.Excititor.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true
|
||||||
|
|
||||||
|
- name: Run analyzer tests with coverage
|
||||||
|
run: |
|
||||||
|
mkdir -p $ARTIFACT_DIR
|
||||||
|
dotnet test src/Aoc/__Tests/StellaOps.Aoc.Analyzers.Tests/StellaOps.Aoc.Analyzers.Tests.csproj -c Release \
|
||||||
|
--settings src/Aoc/aoc.runsettings \
|
||||||
|
--collect:"XPlat Code Coverage" \
|
||||||
|
--logger "trx;LogFileName=aoc-analyzers-tests.trx" \
|
||||||
|
--results-directory $ARTIFACT_DIR
|
||||||
|
|
||||||
|
- name: Run AOC library tests with coverage
|
||||||
|
run: |
|
||||||
|
dotnet test src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj -c Release \
|
||||||
|
--settings src/Aoc/aoc.runsettings \
|
||||||
|
--collect:"XPlat Code Coverage" \
|
||||||
|
--logger "trx;LogFileName=aoc-lib-tests.trx" \
|
||||||
|
--results-directory $ARTIFACT_DIR
|
||||||
|
|
||||||
|
- name: Run AOC CLI tests with coverage
|
||||||
|
run: |
|
||||||
|
dotnet test src/Aoc/__Tests/StellaOps.Aoc.Cli.Tests/StellaOps.Aoc.Cli.Tests.csproj -c Release \
|
||||||
|
--settings src/Aoc/aoc.runsettings \
|
||||||
|
--collect:"XPlat Code Coverage" \
|
||||||
|
--logger "trx;LogFileName=aoc-cli-tests.trx" \
|
||||||
|
--results-directory $ARTIFACT_DIR
|
||||||
|
|
||||||
|
- name: Generate coverage report
|
||||||
|
run: |
|
||||||
|
dotnet tool install --global dotnet-reportgenerator-globaltool || true
|
||||||
|
reportgenerator \
|
||||||
|
-reports:"$ARTIFACT_DIR/**/coverage.cobertura.xml" \
|
||||||
|
-targetdir:"$ARTIFACT_DIR/coverage-report" \
|
||||||
|
-reporttypes:"Html;Cobertura;TextSummary" || true
|
||||||
|
if [ -f "$ARTIFACT_DIR/coverage-report/Summary.txt" ]; then
|
||||||
|
cat "$ARTIFACT_DIR/coverage-report/Summary.txt"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: aoc-guard-artifacts
|
||||||
|
path: ${{ env.ARTIFACT_DIR }}
|
||||||
|
|
||||||
|
aoc-verify:
|
||||||
|
needs: aoc-guard
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
if: github.event_name != 'schedule'
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
|
AOC_VERIFY_SINCE: ${{ github.event.pull_request.base.sha || 'HEAD~1' }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||||
|
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||||
|
|
||||||
|
- name: Set up .NET SDK
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Run AOC verify
|
||||||
|
env:
|
||||||
|
STAGING_MONGO_URI: ${{ secrets.STAGING_MONGO_URI || vars.STAGING_MONGO_URI }}
|
||||||
|
STAGING_POSTGRES_URI: ${{ secrets.STAGING_POSTGRES_URI || vars.STAGING_POSTGRES_URI }}
|
||||||
|
run: |
|
||||||
|
mkdir -p $ARTIFACT_DIR
|
||||||
|
|
||||||
|
# Prefer PostgreSQL, fall back to MongoDB (legacy)
|
||||||
|
if [ -n "${STAGING_POSTGRES_URI:-}" ]; then
|
||||||
|
echo "Using PostgreSQL for AOC verification"
|
||||||
|
dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
|
||||||
|
--since "$AOC_VERIFY_SINCE" \
|
||||||
|
--postgres "$STAGING_POSTGRES_URI" \
|
||||||
|
--output "$ARTIFACT_DIR/aoc-verify.json" \
|
||||||
|
--ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
|
||||||
|
--verbose || VERIFY_EXIT=$?
|
||||||
|
elif [ -n "${STAGING_MONGO_URI:-}" ]; then
|
||||||
|
echo "Using MongoDB for AOC verification (deprecated)"
|
||||||
|
dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
|
||||||
|
--since "$AOC_VERIFY_SINCE" \
|
||||||
|
--mongo "$STAGING_MONGO_URI" \
|
||||||
|
--output "$ARTIFACT_DIR/aoc-verify.json" \
|
||||||
|
--ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
|
||||||
|
--verbose || VERIFY_EXIT=$?
|
||||||
|
else
|
||||||
|
echo "::warning::Neither STAGING_POSTGRES_URI nor STAGING_MONGO_URI set; running dry-run verification"
|
||||||
|
dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
|
||||||
|
--since "$AOC_VERIFY_SINCE" \
|
||||||
|
--postgres "placeholder" \
|
||||||
|
--dry-run \
|
||||||
|
--verbose
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "${VERIFY_EXIT:-}" ] && [ "${VERIFY_EXIT}" -ne 0 ]; then
|
||||||
|
echo "::error::AOC verify reported violations"; exit ${VERIFY_EXIT}
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload verify artifacts
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: aoc-verify-artifacts
|
||||||
|
path: ${{ env.ARTIFACT_DIR }}
|
||||||
51
.gitea/workflows/api-governance.yml
Normal file
51
.gitea/workflows/api-governance.yml
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
name: api-governance
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- "src/Api/**"
|
||||||
|
- ".spectral.yaml"
|
||||||
|
- "package.json"
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- "src/Api/**"
|
||||||
|
- ".spectral.yaml"
|
||||||
|
- "package.json"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
spectral-lint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: "18"
|
||||||
|
- name: Install npm deps
|
||||||
|
run: npm install --ignore-scripts --no-progress
|
||||||
|
- name: Compose aggregate OpenAPI
|
||||||
|
run: npm run api:compose
|
||||||
|
- name: Validate examples coverage
|
||||||
|
run: npm run api:examples
|
||||||
|
- name: Compatibility diff (previous commit)
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
if git show HEAD~1:src/Api/StellaOps.Api.OpenApi/stella.yaml > /tmp/stella-prev.yaml 2>/dev/null; then
|
||||||
|
node scripts/api-compat-diff.mjs /tmp/stella-prev.yaml src/Api/StellaOps.Api.OpenApi/stella.yaml --output text --fail-on-breaking
|
||||||
|
else
|
||||||
|
echo "[api:compat] previous stella.yaml not found; skipping"
|
||||||
|
fi
|
||||||
|
- name: Compatibility diff (baseline)
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
if [ -f src/Api/StellaOps.Api.OpenApi/baselines/stella-baseline.yaml ]; then
|
||||||
|
node scripts/api-compat-diff.mjs src/Api/StellaOps.Api.OpenApi/baselines/stella-baseline.yaml src/Api/StellaOps.Api.OpenApi/stella.yaml --output text
|
||||||
|
else
|
||||||
|
echo "[api:compat] baseline file missing; skipping"
|
||||||
|
fi
|
||||||
|
- name: Generate changelog
|
||||||
|
run: npm run api:changelog
|
||||||
|
- name: Spectral lint (fail on warning+)
|
||||||
|
run: npm run api:lint
|
||||||
128
.gitea/workflows/artifact-signing.yml
Normal file
128
.gitea/workflows/artifact-signing.yml
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
name: Artifact Signing
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- 'v*'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
artifact_path:
|
||||||
|
description: 'Path to artifact to sign'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
|
||||||
|
env:
|
||||||
|
COSIGN_VERSION: 'v2.2.0'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
sign-containers:
|
||||||
|
name: Sign Container Images
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
id-token: write
|
||||||
|
packages: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||||
|
|
||||||
|
- name: Log in to registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Sign images (keyless)
|
||||||
|
if: ${{ !env.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
env:
|
||||||
|
COSIGN_EXPERIMENTAL: "1"
|
||||||
|
run: |
|
||||||
|
IMAGES=(
|
||||||
|
"ghcr.io/${{ github.repository }}/concelier"
|
||||||
|
"ghcr.io/${{ github.repository }}/scanner"
|
||||||
|
"ghcr.io/${{ github.repository }}/authority"
|
||||||
|
)
|
||||||
|
for img in "${IMAGES[@]}"; do
|
||||||
|
if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
|
||||||
|
echo "Signing ${img}:${{ github.ref_name }}..."
|
||||||
|
cosign sign --yes "${img}:${{ github.ref_name }}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Sign images (with key)
|
||||||
|
if: ${{ env.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
env:
|
||||||
|
COSIGN_PRIVATE_KEY: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||||
|
run: |
|
||||||
|
echo "$COSIGN_PRIVATE_KEY" | base64 -d > /tmp/cosign.key
|
||||||
|
IMAGES=(
|
||||||
|
"ghcr.io/${{ github.repository }}/concelier"
|
||||||
|
"ghcr.io/${{ github.repository }}/scanner"
|
||||||
|
"ghcr.io/${{ github.repository }}/authority"
|
||||||
|
)
|
||||||
|
for img in "${IMAGES[@]}"; do
|
||||||
|
if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
|
||||||
|
echo "Signing ${img}:${{ github.ref_name }}..."
|
||||||
|
cosign sign --key /tmp/cosign.key "${img}:${{ github.ref_name }}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
rm -f /tmp/cosign.key
|
||||||
|
|
||||||
|
sign-sbom:
|
||||||
|
name: Sign SBOM Artifacts
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||||
|
|
||||||
|
- name: Generate and sign SBOM
|
||||||
|
run: |
|
||||||
|
# Generate SBOM using syft
|
||||||
|
if command -v syft &> /dev/null; then
|
||||||
|
syft . -o cyclonedx-json > sbom.cdx.json
|
||||||
|
cosign sign-blob --yes sbom.cdx.json --output-signature sbom.cdx.json.sig
|
||||||
|
else
|
||||||
|
echo "syft not installed, skipping SBOM generation"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload signed artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: signed-sbom
|
||||||
|
path: |
|
||||||
|
sbom.cdx.json
|
||||||
|
sbom.cdx.json.sig
|
||||||
|
if-no-files-found: ignore
|
||||||
|
|
||||||
|
verify-signatures:
|
||||||
|
name: Verify Existing Signatures
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||||
|
|
||||||
|
- name: Verify DSSE envelopes
|
||||||
|
run: |
|
||||||
|
find . -name "*.dsse" -o -name "*.dsse.json" | while read f; do
|
||||||
|
echo "Checking $f..."
|
||||||
|
# Basic JSON validation
|
||||||
|
if ! jq empty "$f" 2>/dev/null; then
|
||||||
|
echo "Warning: Invalid JSON in $f"
|
||||||
|
fi
|
||||||
|
done
|
||||||
29
.gitea/workflows/attestation-bundle.yml
Normal file
29
.gitea/workflows/attestation-bundle.yml
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
name: attestation-bundle
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
attest_dir:
|
||||||
|
description: "Directory containing attestation artefacts"
|
||||||
|
required: true
|
||||||
|
default: "out/attest"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
bundle:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Build bundle
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/attest/build-attestation-bundle.sh
|
||||||
|
scripts/attest/build-attestation-bundle.sh "${{ github.event.inputs.attest_dir }}"
|
||||||
|
|
||||||
|
- name: Upload bundle
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: attestation-bundle
|
||||||
|
path: out/attest-bundles/**
|
||||||
@@ -58,6 +58,9 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
- name: Resolve Authority configuration
|
- name: Resolve Authority configuration
|
||||||
id: config
|
id: config
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
30
.gitea/workflows/bench-determinism.yml
Normal file
30
.gitea/workflows/bench-determinism.yml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
name: bench-determinism
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
bench-determinism:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.12'
|
||||||
|
|
||||||
|
- name: Run determinism bench
|
||||||
|
env:
|
||||||
|
BENCH_DETERMINISM_THRESHOLD: "0.95"
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/bench/determinism-run.sh
|
||||||
|
scripts/bench/determinism-run.sh
|
||||||
|
|
||||||
|
- name: Upload determinism artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: bench-determinism
|
||||||
|
path: out/bench-determinism/**
|
||||||
173
.gitea/workflows/benchmark-vs-competitors.yml
Normal file
173
.gitea/workflows/benchmark-vs-competitors.yml
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
name: Benchmark vs Competitors
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
# Run weekly on Sunday at 00:00 UTC
|
||||||
|
- cron: '0 0 * * 0'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
competitors:
|
||||||
|
description: 'Comma-separated list of competitors to benchmark against'
|
||||||
|
required: false
|
||||||
|
default: 'trivy,grype'
|
||||||
|
corpus_size:
|
||||||
|
description: 'Number of images from corpus to test'
|
||||||
|
required: false
|
||||||
|
default: '50'
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmark/**'
|
||||||
|
- 'src/__Tests/__Benchmarks/competitors/**'
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.x'
|
||||||
|
TRIVY_VERSION: '0.50.1'
|
||||||
|
GRYPE_VERSION: '0.74.0'
|
||||||
|
SYFT_VERSION: '0.100.0'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
benchmark:
|
||||||
|
name: Run Competitive Benchmark
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 60
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Install Trivy
|
||||||
|
run: |
|
||||||
|
curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v${{ env.TRIVY_VERSION }}
|
||||||
|
trivy --version
|
||||||
|
|
||||||
|
- name: Install Grype
|
||||||
|
run: |
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.GRYPE_VERSION }}
|
||||||
|
grype version
|
||||||
|
|
||||||
|
- name: Install Syft
|
||||||
|
run: |
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.SYFT_VERSION }}
|
||||||
|
syft version
|
||||||
|
|
||||||
|
- name: Build benchmark library
|
||||||
|
run: |
|
||||||
|
dotnet build src/Scanner/__Libraries/StellaOps.Scanner.Benchmark/StellaOps.Scanner.Benchmark.csproj -c Release
|
||||||
|
|
||||||
|
- name: Load corpus manifest
|
||||||
|
id: corpus
|
||||||
|
run: |
|
||||||
|
echo "corpus_path=src/__Tests/__Benchmarks/competitors/corpus/corpus-manifest.json" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Run Stella Ops scanner
|
||||||
|
run: |
|
||||||
|
echo "Running Stella Ops scanner on corpus..."
|
||||||
|
# TODO: Implement actual scan command
|
||||||
|
# stella scan --corpus ${{ steps.corpus.outputs.corpus_path }} --output src/__Tests/__Benchmarks/results/stellaops.json
|
||||||
|
|
||||||
|
- name: Run Trivy on corpus
|
||||||
|
run: |
|
||||||
|
echo "Running Trivy on corpus images..."
|
||||||
|
# Process each image in corpus
|
||||||
|
mkdir -p src/__Tests/__Benchmarks/results/trivy
|
||||||
|
|
||||||
|
- name: Run Grype on corpus
|
||||||
|
run: |
|
||||||
|
echo "Running Grype on corpus images..."
|
||||||
|
mkdir -p src/__Tests/__Benchmarks/results/grype
|
||||||
|
|
||||||
|
- name: Calculate metrics
|
||||||
|
run: |
|
||||||
|
echo "Calculating precision/recall/F1 metrics..."
|
||||||
|
# dotnet run --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmark \
|
||||||
|
# --calculate-metrics \
|
||||||
|
# --ground-truth ${{ steps.corpus.outputs.corpus_path }} \
|
||||||
|
# --results src/__Tests/__Benchmarks/results/ \
|
||||||
|
# --output src/__Tests/__Benchmarks/results/metrics.json
|
||||||
|
|
||||||
|
- name: Generate comparison report
|
||||||
|
run: |
|
||||||
|
echo "Generating comparison report..."
|
||||||
|
mkdir -p src/__Tests/__Benchmarks/results
|
||||||
|
cat > src/__Tests/__Benchmarks/results/summary.json << 'EOF'
|
||||||
|
{
|
||||||
|
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||||
|
"competitors": ["trivy", "grype", "syft"],
|
||||||
|
"status": "pending_implementation"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Upload benchmark results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: benchmark-results-${{ github.run_id }}
|
||||||
|
path: src/__Tests/__Benchmarks/results/
|
||||||
|
retention-days: 90
|
||||||
|
|
||||||
|
- name: Update claims index
|
||||||
|
if: github.ref == 'refs/heads/main'
|
||||||
|
run: |
|
||||||
|
echo "Updating claims index with new evidence..."
|
||||||
|
# dotnet run --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmark \
|
||||||
|
# --update-claims \
|
||||||
|
# --metrics src/__Tests/__Benchmarks/results/metrics.json \
|
||||||
|
# --output docs/claims-index.md
|
||||||
|
|
||||||
|
- name: Comment on PR
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const fs = require('fs');
|
||||||
|
const metrics = fs.existsSync('src/__Tests/__Benchmarks/results/metrics.json')
|
||||||
|
? JSON.parse(fs.readFileSync('src/__Tests/__Benchmarks/results/metrics.json', 'utf8'))
|
||||||
|
: { status: 'pending' };
|
||||||
|
|
||||||
|
const body = `## Benchmark Results
|
||||||
|
|
||||||
|
| Tool | Precision | Recall | F1 Score |
|
||||||
|
|------|-----------|--------|----------|
|
||||||
|
| Stella Ops | ${metrics.stellaops?.precision || 'N/A'} | ${metrics.stellaops?.recall || 'N/A'} | ${metrics.stellaops?.f1 || 'N/A'} |
|
||||||
|
| Trivy | ${metrics.trivy?.precision || 'N/A'} | ${metrics.trivy?.recall || 'N/A'} | ${metrics.trivy?.f1 || 'N/A'} |
|
||||||
|
| Grype | ${metrics.grype?.precision || 'N/A'} | ${metrics.grype?.recall || 'N/A'} | ${metrics.grype?.f1 || 'N/A'} |
|
||||||
|
|
||||||
|
[Full report](${process.env.GITHUB_SERVER_URL}/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID})
|
||||||
|
`;
|
||||||
|
|
||||||
|
github.rest.issues.createComment({
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body: body
|
||||||
|
});
|
||||||
|
|
||||||
|
verify-claims:
|
||||||
|
name: Verify Claims
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: benchmark
|
||||||
|
if: github.ref == 'refs/heads/main'
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download benchmark results
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: benchmark-results-${{ github.run_id }}
|
||||||
|
path: src/__Tests/__Benchmarks/results/
|
||||||
|
|
||||||
|
- name: Verify all claims
|
||||||
|
run: |
|
||||||
|
echo "Verifying all claims against new evidence..."
|
||||||
|
# stella benchmark verify --all
|
||||||
|
|
||||||
|
- name: Report claim status
|
||||||
|
run: |
|
||||||
|
echo "Generating claim verification report..."
|
||||||
|
# Output claim status summary
|
||||||
@@ -1,5 +1,16 @@
|
|||||||
# .gitea/workflows/build-test-deploy.yml
|
# .gitea/workflows/build-test-deploy.yml
|
||||||
# Unified CI/CD workflow for git.stella-ops.org (Feedser monorepo)
|
# Build, Validation, and Deployment workflow for git.stella-ops.org
|
||||||
|
#
|
||||||
|
# WORKFLOW INTEGRATION STRATEGY (Sprint 20251226_003_CICD):
|
||||||
|
# =========================================================
|
||||||
|
# This workflow handles: Build, Validation, Quality Gates, and Deployment
|
||||||
|
# Test execution is handled by: test-matrix.yml (runs in parallel on PRs)
|
||||||
|
#
|
||||||
|
# For PR gating:
|
||||||
|
# - test-matrix.yml gates on: Unit, Architecture, Contract, Integration, Security, Golden tests
|
||||||
|
# - build-test-deploy.yml gates on: Build validation, quality gates, security scans
|
||||||
|
#
|
||||||
|
# Both workflows run on PRs and should be required for merge via branch protection.
|
||||||
|
|
||||||
name: Build Test Deploy
|
name: Build Test Deploy
|
||||||
|
|
||||||
@@ -21,6 +32,8 @@ on:
|
|||||||
- 'docs/**'
|
- 'docs/**'
|
||||||
- 'scripts/**'
|
- 'scripts/**'
|
||||||
- '.gitea/workflows/**'
|
- '.gitea/workflows/**'
|
||||||
|
schedule:
|
||||||
|
- cron: '0 5 * * *'
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
force_deploy:
|
force_deploy:
|
||||||
@@ -28,9 +41,14 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: 'false'
|
default: 'false'
|
||||||
type: boolean
|
type: boolean
|
||||||
|
excititor_batch:
|
||||||
|
description: 'Run Excititor batch-ingest validation suite'
|
||||||
|
required: false
|
||||||
|
default: 'false'
|
||||||
|
type: boolean
|
||||||
|
|
||||||
env:
|
env:
|
||||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
DOTNET_VERSION: '10.0.100'
|
||||||
BUILD_CONFIGURATION: Release
|
BUILD_CONFIGURATION: Release
|
||||||
CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
|
CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
|
||||||
RUNNER_TOOL_CACHE: /toolcache
|
RUNNER_TOOL_CACHE: /toolcache
|
||||||
@@ -48,8 +66,20 @@ jobs:
|
|||||||
tar -xzf /tmp/helm.tgz -C /tmp
|
tar -xzf /tmp/helm.tgz -C /tmp
|
||||||
sudo install -m 0755 /tmp/linux-amd64/helm /usr/local/bin/helm
|
sudo install -m 0755 /tmp/linux-amd64/helm /usr/local/bin/helm
|
||||||
|
|
||||||
|
- name: Validate Helm chart rendering
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
CHART_PATH="devops/helm/stellaops"
|
||||||
|
helm lint "$CHART_PATH"
|
||||||
|
for values in values.yaml values-dev.yaml values-stage.yaml values-prod.yaml values-airgap.yaml values-mirror.yaml; do
|
||||||
|
release="stellaops-${values%.*}"
|
||||||
|
echo "::group::Helm template ${release} (${values})"
|
||||||
|
helm template "$release" "$CHART_PATH" -f "$CHART_PATH/$values" >/dev/null
|
||||||
|
echo "::endgroup::"
|
||||||
|
done
|
||||||
|
|
||||||
- name: Validate deployment profiles
|
- name: Validate deployment profiles
|
||||||
run: ./deploy/tools/validate-profiles.sh
|
run: ./devops/tools/validate-profiles.sh
|
||||||
|
|
||||||
build-test:
|
build-test:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
@@ -65,10 +95,21 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Ensure Mongo test URI configured
|
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||||
|
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||||
|
|
||||||
|
- name: Verify binary layout
|
||||||
|
run: .gitea/scripts/validate/verify-binaries.sh
|
||||||
|
|
||||||
|
- name: Ensure binary manifests are up to date
|
||||||
run: |
|
run: |
|
||||||
if [ -z "${STELLAOPS_TEST_MONGO_URI:-}" ]; then
|
python3 scripts/update-binary-manifests.py
|
||||||
echo "::error::STELLAOPS_TEST_MONGO_URI must be provided via repository secrets or variables for Graph Indexer integration tests."
|
git diff --exit-code .nuget/manifest.json vendor/manifest.json offline/feeds/manifest.json
|
||||||
|
|
||||||
|
- name: Ensure PostgreSQL test URI configured
|
||||||
|
run: |
|
||||||
|
if [ -z "${STELLAOPS_TEST_POSTGRES_CONNECTION:-}" ]; then
|
||||||
|
echo "::error::STELLAOPS_TEST_POSTGRES_CONNECTION must be provided via repository secrets or variables for integration tests."
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -76,18 +117,22 @@ jobs:
|
|||||||
run: python3 scripts/verify-policy-scopes.py
|
run: python3 scripts/verify-policy-scopes.py
|
||||||
|
|
||||||
- name: Validate NuGet restore source ordering
|
- name: Validate NuGet restore source ordering
|
||||||
run: python3 ops/devops/validate_restore_sources.py
|
run: python3 devops/validate_restore_sources.py
|
||||||
|
|
||||||
- name: Validate telemetry storage configuration
|
- name: Validate telemetry storage configuration
|
||||||
run: python3 ops/devops/telemetry/validate_storage_stack.py
|
run: python3 devops/telemetry/validate_storage_stack.py
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: |
|
||||||
|
python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
- name: Telemetry tenant isolation smoke
|
- name: Telemetry tenant isolation smoke
|
||||||
env:
|
env:
|
||||||
COMPOSE_DIR: ${GITHUB_WORKSPACE}/deploy/compose
|
COMPOSE_DIR: ${GITHUB_WORKSPACE}/devops/compose
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
./ops/devops/telemetry/generate_dev_tls.sh
|
./devops/telemetry/generate_dev_tls.sh
|
||||||
COMPOSE_DIR="${COMPOSE_DIR:-${GITHUB_WORKSPACE}/deploy/compose}"
|
COMPOSE_DIR="${COMPOSE_DIR:-${GITHUB_WORKSPACE}/devops/compose}"
|
||||||
cleanup() {
|
cleanup() {
|
||||||
set +e
|
set +e
|
||||||
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry.yaml down -v --remove-orphans >/dev/null 2>&1)
|
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry.yaml down -v --remove-orphans >/dev/null 2>&1)
|
||||||
@@ -97,8 +142,8 @@ jobs:
|
|||||||
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry-storage.yaml up -d)
|
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry-storage.yaml up -d)
|
||||||
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry.yaml up -d)
|
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry.yaml up -d)
|
||||||
sleep 5
|
sleep 5
|
||||||
python3 ops/devops/telemetry/smoke_otel_collector.py --host localhost
|
python3 devops/telemetry/smoke_otel_collector.py --host localhost
|
||||||
python3 ops/devops/telemetry/tenant_isolation_smoke.py \
|
python3 devops/telemetry/tenant_isolation_smoke.py \
|
||||||
--collector https://localhost:4318/v1 \
|
--collector https://localhost:4318/v1 \
|
||||||
--tempo https://localhost:3200 \
|
--tempo https://localhost:3200 \
|
||||||
--loki https://localhost:3100
|
--loki https://localhost:3100
|
||||||
@@ -150,6 +195,37 @@ jobs:
|
|||||||
--logger "trx;LogFileName=stellaops-concelier-tests.trx" \
|
--logger "trx;LogFileName=stellaops-concelier-tests.trx" \
|
||||||
--results-directory "$TEST_RESULTS_DIR"
|
--results-directory "$TEST_RESULTS_DIR"
|
||||||
|
|
||||||
|
- name: Run PostgreSQL storage integration tests (Testcontainers)
|
||||||
|
env:
|
||||||
|
POSTGRES_TEST_IMAGE: postgres:16-alpine
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
mkdir -p "$TEST_RESULTS_DIR"
|
||||||
|
PROJECTS=(
|
||||||
|
src/__Libraries/__Tests/StellaOps.Infrastructure.Postgres.Tests/StellaOps.Infrastructure.Postgres.Tests.csproj
|
||||||
|
src/Authority/__Tests/StellaOps.Authority.Storage.Postgres.Tests/StellaOps.Authority.Storage.Postgres.Tests.csproj
|
||||||
|
src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Postgres.Tests/StellaOps.Scheduler.Storage.Postgres.Tests.csproj
|
||||||
|
src/Concelier/__Tests/StellaOps.Concelier.Storage.Postgres.Tests/StellaOps.Concelier.Storage.Postgres.Tests.csproj
|
||||||
|
src/Excititor/__Tests/StellaOps.Excititor.Storage.Postgres.Tests/StellaOps.Excititor.Storage.Postgres.Tests.csproj
|
||||||
|
src/Notify/__Tests/StellaOps.Notify.Storage.Postgres.Tests/StellaOps.Notify.Storage.Postgres.Tests.csproj
|
||||||
|
src/Policy/__Tests/StellaOps.Policy.Storage.Postgres.Tests/StellaOps.Policy.Storage.Postgres.Tests.csproj
|
||||||
|
)
|
||||||
|
for project in "${PROJECTS[@]}"; do
|
||||||
|
name="$(basename "${project%.*}")"
|
||||||
|
dotnet test "$project" \
|
||||||
|
--configuration $BUILD_CONFIGURATION \
|
||||||
|
--logger "trx;LogFileName=${name}.trx" \
|
||||||
|
--results-directory "$TEST_RESULTS_DIR"
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Run TimelineIndexer tests (EB1 evidence linkage gate)
|
||||||
|
run: |
|
||||||
|
mkdir -p "$TEST_RESULTS_DIR"
|
||||||
|
dotnet test src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.sln \
|
||||||
|
--configuration $BUILD_CONFIGURATION \
|
||||||
|
--logger "trx;LogFileName=timelineindexer-tests.trx" \
|
||||||
|
--results-directory "$TEST_RESULTS_DIR"
|
||||||
|
|
||||||
- name: Lint policy DSL samples
|
- name: Lint policy DSL samples
|
||||||
run: dotnet run --project tools/PolicyDslValidator/PolicyDslValidator.csproj -- --strict docs/examples/policies/*.yaml
|
run: dotnet run --project tools/PolicyDslValidator/PolicyDslValidator.csproj -- --strict docs/examples/policies/*.yaml
|
||||||
|
|
||||||
@@ -255,7 +331,7 @@ PY
|
|||||||
|
|
||||||
curl -sSf -X POST -H 'Content-type: application/json' --data "$payload" "$SLACK_WEBHOOK"
|
curl -sSf -X POST -H 'Content-type: application/json' --data "$payload" "$SLACK_WEBHOOK"
|
||||||
- name: Run release tooling tests
|
- name: Run release tooling tests
|
||||||
run: python ops/devops/release/test_verify_release.py
|
run: python devops/release/test_verify_release.py
|
||||||
|
|
||||||
- name: Build scanner language analyzer projects
|
- name: Build scanner language analyzer projects
|
||||||
run: |
|
run: |
|
||||||
@@ -280,6 +356,56 @@ PY
|
|||||||
--logger "trx;LogFileName=stellaops-scanner-lang-tests.trx" \
|
--logger "trx;LogFileName=stellaops-scanner-lang-tests.trx" \
|
||||||
--results-directory "$TEST_RESULTS_DIR"
|
--results-directory "$TEST_RESULTS_DIR"
|
||||||
|
|
||||||
|
- name: Build and test Router components
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
ROUTER_PROJECTS=(
|
||||||
|
src/__Libraries/StellaOps.Router.Common/StellaOps.Router.Common.csproj
|
||||||
|
src/__Libraries/StellaOps.Router.Config/StellaOps.Router.Config.csproj
|
||||||
|
src/__Libraries/StellaOps.Router.Transport.InMemory/StellaOps.Router.Transport.InMemory.csproj
|
||||||
|
src/__Libraries/StellaOps.Router.Transport.Tcp/StellaOps.Router.Transport.Tcp.csproj
|
||||||
|
src/__Libraries/StellaOps.Router.Transport.Tls/StellaOps.Router.Transport.Tls.csproj
|
||||||
|
src/__Libraries/StellaOps.Router.Transport.Udp/StellaOps.Router.Transport.Udp.csproj
|
||||||
|
src/__Libraries/StellaOps.Router.Transport.RabbitMq/StellaOps.Router.Transport.RabbitMq.csproj
|
||||||
|
src/__Libraries/StellaOps.Microservice/StellaOps.Microservice.csproj
|
||||||
|
src/__Libraries/StellaOps.Microservice.SourceGen/StellaOps.Microservice.SourceGen.csproj
|
||||||
|
)
|
||||||
|
for project in "${ROUTER_PROJECTS[@]}"; do
|
||||||
|
echo "::group::Build $project"
|
||||||
|
dotnet build "$project" --configuration $BUILD_CONFIGURATION --no-restore -warnaserror
|
||||||
|
echo "::endgroup::"
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Run Router and Microservice tests
|
||||||
|
run: |
|
||||||
|
mkdir -p "$TEST_RESULTS_DIR"
|
||||||
|
ROUTER_TEST_PROJECTS=(
|
||||||
|
# Core Router libraries
|
||||||
|
src/__Libraries/__Tests/StellaOps.Router.Common.Tests/StellaOps.Router.Common.Tests.csproj
|
||||||
|
src/__Libraries/__Tests/StellaOps.Router.Config.Tests/StellaOps.Router.Config.Tests.csproj
|
||||||
|
# Transport layers
|
||||||
|
src/__Libraries/__Tests/StellaOps.Router.Transport.InMemory.Tests/StellaOps.Router.Transport.InMemory.Tests.csproj
|
||||||
|
src/__Libraries/__Tests/StellaOps.Router.Transport.Tcp.Tests/StellaOps.Router.Transport.Tcp.Tests.csproj
|
||||||
|
src/__Libraries/__Tests/StellaOps.Router.Transport.Tls.Tests/StellaOps.Router.Transport.Tls.Tests.csproj
|
||||||
|
src/__Libraries/__Tests/StellaOps.Router.Transport.Udp.Tests/StellaOps.Router.Transport.Udp.Tests.csproj
|
||||||
|
# Microservice SDK
|
||||||
|
src/__Libraries/__Tests/StellaOps.Microservice.Tests/StellaOps.Microservice.Tests.csproj
|
||||||
|
src/__Libraries/__Tests/StellaOps.Microservice.SourceGen.Tests/StellaOps.Microservice.SourceGen.Tests.csproj
|
||||||
|
# Integration tests
|
||||||
|
src/__Libraries/__Tests/StellaOps.Router.Integration.Tests/StellaOps.Router.Integration.Tests.csproj
|
||||||
|
# Gateway tests
|
||||||
|
src/Gateway/__Tests/StellaOps.Gateway.WebService.Tests/StellaOps.Gateway.WebService.Tests.csproj
|
||||||
|
)
|
||||||
|
for project in "${ROUTER_TEST_PROJECTS[@]}"; do
|
||||||
|
name="$(basename "${project%.*}")"
|
||||||
|
echo "::group::Test $name"
|
||||||
|
dotnet test "$project" \
|
||||||
|
--configuration $BUILD_CONFIGURATION \
|
||||||
|
--logger "trx;LogFileName=${name}.trx" \
|
||||||
|
--results-directory "$TEST_RESULTS_DIR"
|
||||||
|
echo "::endgroup::"
|
||||||
|
done
|
||||||
|
|
||||||
- name: Run scanner analyzer performance benchmark
|
- name: Run scanner analyzer performance benchmark
|
||||||
env:
|
env:
|
||||||
PERF_OUTPUT_DIR: ${{ github.workspace }}/artifacts/perf/scanner-analyzers
|
PERF_OUTPUT_DIR: ${{ github.workspace }}/artifacts/perf/scanner-analyzers
|
||||||
@@ -442,6 +568,15 @@ PY
|
|||||||
if-no-files-found: error
|
if-no-files-found: error
|
||||||
retention-days: 7
|
retention-days: 7
|
||||||
|
|
||||||
|
- name: Run console endpoint tests
|
||||||
|
run: |
|
||||||
|
mkdir -p "$TEST_RESULTS_DIR"
|
||||||
|
dotnet test src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj \
|
||||||
|
--configuration $BUILD_CONFIGURATION \
|
||||||
|
--logger "trx;LogFileName=console-endpoints.trx" \
|
||||||
|
--results-directory "$TEST_RESULTS_DIR" \
|
||||||
|
--filter ConsoleEndpointsTests
|
||||||
|
|
||||||
- name: Upload test results
|
- name: Upload test results
|
||||||
if: always()
|
if: always()
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
@@ -451,6 +586,247 @@ PY
|
|||||||
if-no-files-found: ignore
|
if-no-files-found: ignore
|
||||||
retention-days: 7
|
retention-days: 7
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Quality Gates Foundation (Sprint 0350)
|
||||||
|
# ============================================================================
|
||||||
|
quality-gates:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: build-test
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Reachability quality gate
|
||||||
|
id: reachability
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "::group::Computing reachability metrics"
|
||||||
|
if [ -f .gitea/scripts/metrics/compute-reachability-metrics.sh ]; then
|
||||||
|
chmod +x .gitea/scripts/metrics/compute-reachability-metrics.sh
|
||||||
|
METRICS=$(./.gitea/scripts/metrics/compute-reachability-metrics.sh --dry-run 2>/dev/null || echo '{}')
|
||||||
|
echo "metrics=$METRICS" >> $GITHUB_OUTPUT
|
||||||
|
echo "Reachability metrics: $METRICS"
|
||||||
|
else
|
||||||
|
echo "Reachability script not found, skipping"
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: TTFS regression gate
|
||||||
|
id: ttfs
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "::group::Computing TTFS metrics"
|
||||||
|
if [ -f .gitea/scripts/metrics/compute-ttfs-metrics.sh ]; then
|
||||||
|
chmod +x .gitea/scripts/metrics/compute-ttfs-metrics.sh
|
||||||
|
METRICS=$(./.gitea/scripts/metrics/compute-ttfs-metrics.sh --dry-run 2>/dev/null || echo '{}')
|
||||||
|
echo "metrics=$METRICS" >> $GITHUB_OUTPUT
|
||||||
|
echo "TTFS metrics: $METRICS"
|
||||||
|
else
|
||||||
|
echo "TTFS script not found, skipping"
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Performance SLO gate
|
||||||
|
id: slo
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "::group::Enforcing performance SLOs"
|
||||||
|
if [ -f .gitea/scripts/metrics/enforce-performance-slos.sh ]; then
|
||||||
|
chmod +x .gitea/scripts/metrics/enforce-performance-slos.sh
|
||||||
|
./.gitea/scripts/metrics/enforce-performance-slos.sh --warn-only || true
|
||||||
|
else
|
||||||
|
echo "Performance SLO script not found, skipping"
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: RLS policy validation
|
||||||
|
id: rls
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "::group::Validating RLS policies"
|
||||||
|
if [ -f devops/database/postgres/validation/001_validate_rls.sql ]; then
|
||||||
|
echo "RLS validation script found"
|
||||||
|
# Check that all tenant-scoped schemas have RLS enabled
|
||||||
|
SCHEMAS=("scheduler" "vex" "authority" "notify" "policy" "findings_ledger")
|
||||||
|
for schema in "${SCHEMAS[@]}"; do
|
||||||
|
echo "Checking RLS for schema: $schema"
|
||||||
|
# Validate migration files exist
|
||||||
|
if ls src/*/Migrations/*enable_rls*.sql 2>/dev/null | grep -q "$schema"; then
|
||||||
|
echo " ✓ RLS migration exists for $schema"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
echo "RLS validation passed (static check)"
|
||||||
|
else
|
||||||
|
echo "RLS validation script not found, skipping"
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Upload quality gate results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: quality-gate-results
|
||||||
|
path: |
|
||||||
|
scripts/ci/*.json
|
||||||
|
scripts/ci/*.yaml
|
||||||
|
if-no-files-found: ignore
|
||||||
|
retention-days: 14
|
||||||
|
|
||||||
|
security-testing:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: build-test
|
||||||
|
if: github.event_name == 'pull_request' || github.event_name == 'schedule'
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/__Tests/security/StellaOps.Security.Tests/StellaOps.Security.Tests.csproj
|
||||||
|
|
||||||
|
- name: Run OWASP security tests
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "::group::Running security tests"
|
||||||
|
dotnet test src/__Tests/security/StellaOps.Security.Tests/StellaOps.Security.Tests.csproj \
|
||||||
|
--no-restore \
|
||||||
|
--logger "trx;LogFileName=security-tests.trx" \
|
||||||
|
--results-directory ./security-test-results \
|
||||||
|
--filter "Category=Security" \
|
||||||
|
--verbosity normal
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Upload security test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: security-test-results
|
||||||
|
path: security-test-results/
|
||||||
|
if-no-files-found: ignore
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
mutation-testing:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: build-test
|
||||||
|
if: github.event_name == 'schedule' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'mutation-test'))
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore tools
|
||||||
|
run: dotnet tool restore
|
||||||
|
|
||||||
|
- name: Run mutation tests - Scanner.Core
|
||||||
|
id: scanner-mutation
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "::group::Mutation testing Scanner.Core"
|
||||||
|
cd src/Scanner/__Libraries/StellaOps.Scanner.Core
|
||||||
|
dotnet stryker --reporter json --reporter html --output ../../../mutation-results/scanner-core || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
|
||||||
|
echo "::endgroup::"
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Run mutation tests - Policy.Engine
|
||||||
|
id: policy-mutation
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "::group::Mutation testing Policy.Engine"
|
||||||
|
cd src/Policy/__Libraries/StellaOps.Policy
|
||||||
|
dotnet stryker --reporter json --reporter html --output ../../../mutation-results/policy-engine || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
|
||||||
|
echo "::endgroup::"
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Run mutation tests - Authority.Core
|
||||||
|
id: authority-mutation
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "::group::Mutation testing Authority.Core"
|
||||||
|
cd src/Authority/StellaOps.Authority
|
||||||
|
dotnet stryker --reporter json --reporter html --output ../../mutation-results/authority-core || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
|
||||||
|
echo "::endgroup::"
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Upload mutation results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: mutation-testing-results
|
||||||
|
path: mutation-results/
|
||||||
|
if-no-files-found: ignore
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
- name: Check mutation thresholds
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Checking mutation score thresholds..."
|
||||||
|
# Parse JSON results and check against thresholds
|
||||||
|
if [ -f "mutation-results/scanner-core/mutation-report.json" ]; then
|
||||||
|
SCORE=$(jq '.mutationScore // 0' mutation-results/scanner-core/mutation-report.json)
|
||||||
|
echo "Scanner.Core mutation score: $SCORE%"
|
||||||
|
if (( $(echo "$SCORE < 65" | bc -l) )); then
|
||||||
|
echo "::error::Scanner.Core mutation score below threshold"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
sealed-mode-ci:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: build-test
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: read
|
||||||
|
env:
|
||||||
|
COMPOSE_PROJECT_NAME: sealedmode
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Login to registry
|
||||||
|
if: ${{ secrets.REGISTRY_USERNAME != '' && secrets.REGISTRY_PASSWORD != '' }}
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: registry.stella-ops.org
|
||||||
|
username: ${{ secrets.REGISTRY_USERNAME }}
|
||||||
|
password: ${{ secrets.REGISTRY_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Run sealed-mode CI harness
|
||||||
|
working-directory: devops/sealed-mode-ci
|
||||||
|
env:
|
||||||
|
COMPOSE_PROJECT_NAME: sealedmode
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
./run-sealed-ci.sh
|
||||||
|
|
||||||
|
- name: Upload sealed-mode CI artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: sealed-mode-ci
|
||||||
|
path: devops/sealed-mode-ci/artifacts/sealed-mode-ci
|
||||||
|
if-no-files-found: error
|
||||||
|
retention-days: 14
|
||||||
|
|
||||||
authority-container:
|
authority-container:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
needs: build-test
|
needs: build-test
|
||||||
@@ -464,6 +840,41 @@ PY
|
|||||||
- name: Build Authority container image
|
- name: Build Authority container image
|
||||||
run: docker build -f ops/authority/Dockerfile -t stellaops-authority:ci .
|
run: docker build -f ops/authority/Dockerfile -t stellaops-authority:ci .
|
||||||
|
|
||||||
|
excititor-batch-validation:
|
||||||
|
needs: build-test
|
||||||
|
if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.excititor_batch == 'true')
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
BATCH_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results/excititor-batch
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Run Excititor batch ingest validation suite
|
||||||
|
env:
|
||||||
|
DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
mkdir -p "$BATCH_RESULTS_DIR"
|
||||||
|
dotnet test src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj \
|
||||||
|
--configuration $BUILD_CONFIGURATION \
|
||||||
|
--filter "Category=BatchIngestValidation" \
|
||||||
|
--logger "trx;LogFileName=excititor-batch.trx" \
|
||||||
|
--results-directory "$BATCH_RESULTS_DIR"
|
||||||
|
|
||||||
|
- name: Upload Excititor batch ingest results
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: excititor-batch-ingest-results
|
||||||
|
path: ${{ env.BATCH_RESULTS_DIR }}
|
||||||
|
|
||||||
docs:
|
docs:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
env:
|
env:
|
||||||
|
|||||||
48
.gitea/workflows/cli-build.yml
Normal file
48
.gitea/workflows/cli-build.yml
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
name: cli-build
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
rids:
|
||||||
|
description: "Comma-separated RIDs (e.g., linux-x64,win-x64,osx-arm64)"
|
||||||
|
required: false
|
||||||
|
default: "linux-x64,win-x64,osx-arm64"
|
||||||
|
config:
|
||||||
|
description: "Build configuration"
|
||||||
|
required: false
|
||||||
|
default: "Release"
|
||||||
|
sign:
|
||||||
|
description: "Enable cosign signing (requires COSIGN_KEY)"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Install syft (SBOM)
|
||||||
|
uses: anchore/sbom-action/download-syft@v0
|
||||||
|
|
||||||
|
- name: Build CLI artifacts
|
||||||
|
run: |
|
||||||
|
chmod +x .gitea/scripts/build/build-cli.sh
|
||||||
|
RIDS="${{ github.event.inputs.rids }}" CONFIG="${{ github.event.inputs.config }}" SBOM_TOOL=syft SIGN="${{ github.event.inputs.sign }}" COSIGN_KEY="${{ secrets.COSIGN_KEY }}" .gitea/scripts/build/build-cli.sh
|
||||||
|
|
||||||
|
- name: List artifacts
|
||||||
|
run: find out/cli -maxdepth 3 -type f -print
|
||||||
|
|
||||||
|
- name: Upload CLI artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: stella-cli
|
||||||
|
path: out/cli/**
|
||||||
47
.gitea/workflows/cli-chaos-parity.yml
Normal file
47
.gitea/workflows/cli-chaos-parity.yml
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
name: cli-chaos-parity
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
chaos:
|
||||||
|
description: "Run chaos smoke (true/false)"
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
parity:
|
||||||
|
description: "Run parity diff (true/false)"
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
cli-checks:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Chaos smoke
|
||||||
|
if: ${{ github.event.inputs.chaos == 'true' }}
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/cli/chaos-smoke.sh
|
||||||
|
scripts/cli/chaos-smoke.sh
|
||||||
|
|
||||||
|
- name: Parity diff
|
||||||
|
if: ${{ github.event.inputs.parity == 'true' }}
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/cli/parity-diff.sh
|
||||||
|
scripts/cli/parity-diff.sh
|
||||||
|
|
||||||
|
- name: Upload evidence
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: cli-chaos-parity
|
||||||
|
path: |
|
||||||
|
out/cli-chaos/**
|
||||||
|
out/cli-goldens/**
|
||||||
47
.gitea/workflows/concelier-attestation-tests.yml
Normal file
47
.gitea/workflows/concelier-attestation-tests.yml
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
name: Concelier Attestation Tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- 'src/Concelier/**'
|
||||||
|
- '.gitea/workflows/concelier-attestation-tests.yml'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/Concelier/**'
|
||||||
|
- '.gitea/workflows/concelier-attestation-tests.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
attestation-tests:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup .NET 10 preview
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: '10.0.100'
|
||||||
|
|
||||||
|
- name: Restore Concelier solution
|
||||||
|
run: dotnet restore src/Concelier/StellaOps.Concelier.sln
|
||||||
|
|
||||||
|
- name: Build WebService Tests (no analyzers)
|
||||||
|
run: dotnet build src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj -c Release -p:DisableAnalyzers=true
|
||||||
|
|
||||||
|
- name: Run WebService attestation test
|
||||||
|
run: dotnet test src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj -c Release --filter InternalAttestationVerify --no-build --logger trx --results-directory TestResults
|
||||||
|
|
||||||
|
- name: Build Core Tests (no analyzers)
|
||||||
|
run: dotnet build src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj -c Release -p:DisableAnalyzers=true
|
||||||
|
|
||||||
|
- name: Run Core attestation builder tests
|
||||||
|
run: dotnet test src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj -c Release --filter EvidenceBundleAttestationBuilderTests --no-build --logger trx --results-directory TestResults
|
||||||
|
|
||||||
|
- name: Upload TRX results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: concelier-attestation-tests-trx
|
||||||
|
path: '**/TestResults/*.trx'
|
||||||
32
.gitea/workflows/concelier-store-aoc-19-005.yml
Normal file
32
.gitea/workflows/concelier-store-aoc-19-005.yml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
name: Concelier STORE-AOC-19-005 Dataset
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-dataset:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/out/linksets
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: sudo apt-get update && sudo apt-get install -y zstd
|
||||||
|
|
||||||
|
- name: Build dataset tarball
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/concelier/build-store-aoc-19-005-dataset.sh scripts/concelier/test-store-aoc-19-005-dataset.sh
|
||||||
|
scripts/concelier/build-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"
|
||||||
|
|
||||||
|
- name: Validate dataset
|
||||||
|
run: scripts/concelier/test-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"
|
||||||
|
|
||||||
|
- name: Upload dataset artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: concelier-store-aoc-19-005-dataset
|
||||||
|
path: |
|
||||||
|
${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst
|
||||||
|
${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst.sha256
|
||||||
247
.gitea/workflows/connector-fixture-drift.yml
Normal file
247
.gitea/workflows/connector-fixture-drift.yml
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# connector-fixture-drift.yml
|
||||||
|
# Sprint: SPRINT_5100_0007_0005_connector_fixtures
|
||||||
|
# Task: CONN-FIX-016
|
||||||
|
# Description: Weekly schema drift detection for connector fixtures with auto-PR
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
name: Connector Fixture Drift
|
||||||
|
|
||||||
|
on:
|
||||||
|
# Weekly schedule: Sunday at 2:00 UTC
|
||||||
|
schedule:
|
||||||
|
- cron: '0 2 * * 0'
|
||||||
|
# Manual trigger for on-demand drift detection
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
auto_update:
|
||||||
|
description: 'Auto-update fixtures if drift detected'
|
||||||
|
required: false
|
||||||
|
default: 'true'
|
||||||
|
type: boolean
|
||||||
|
create_pr:
|
||||||
|
description: 'Create PR for updated fixtures'
|
||||||
|
required: false
|
||||||
|
default: 'true'
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
TZ: UTC
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
detect-drift:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
outputs:
|
||||||
|
has_drift: ${{ steps.drift.outputs.has_drift }}
|
||||||
|
drift_count: ${{ steps.drift.outputs.drift_count }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: '10.0.100'
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Cache NuGet packages
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
.nuget/packages
|
||||||
|
key: fixture-drift-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}
|
||||||
|
|
||||||
|
- name: Restore solution
|
||||||
|
run: dotnet restore src/StellaOps.sln --configfile nuget.config
|
||||||
|
|
||||||
|
- name: Build test projects
|
||||||
|
run: |
|
||||||
|
dotnet build src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj -c Release --no-restore
|
||||||
|
dotnet build src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj -c Release --no-restore
|
||||||
|
|
||||||
|
- name: Run Live schema drift tests
|
||||||
|
id: drift
|
||||||
|
env:
|
||||||
|
STELLAOPS_LIVE_TESTS: 'true'
|
||||||
|
STELLAOPS_UPDATE_FIXTURES: ${{ inputs.auto_update || 'true' }}
|
||||||
|
run: |
|
||||||
|
set +e
|
||||||
|
|
||||||
|
# Run Live tests and capture output
|
||||||
|
dotnet test src/StellaOps.sln \
|
||||||
|
--filter "Category=Live" \
|
||||||
|
--no-build \
|
||||||
|
-c Release \
|
||||||
|
--logger "console;verbosity=detailed" \
|
||||||
|
--results-directory out/drift-results \
|
||||||
|
2>&1 | tee out/drift-output.log
|
||||||
|
|
||||||
|
EXIT_CODE=$?
|
||||||
|
|
||||||
|
# Check for fixture changes
|
||||||
|
CHANGED_FILES=$(git diff --name-only -- '**/Fixtures/*.json' '**/Expected/*.json' | wc -l)
|
||||||
|
|
||||||
|
if [ "$CHANGED_FILES" -gt 0 ]; then
|
||||||
|
echo "has_drift=true" >> $GITHUB_OUTPUT
|
||||||
|
echo "drift_count=$CHANGED_FILES" >> $GITHUB_OUTPUT
|
||||||
|
echo "::warning::Schema drift detected in $CHANGED_FILES fixture files"
|
||||||
|
else
|
||||||
|
echo "has_drift=false" >> $GITHUB_OUTPUT
|
||||||
|
echo "drift_count=0" >> $GITHUB_OUTPUT
|
||||||
|
echo "::notice::No schema drift detected"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Don't fail workflow on test failures (drift is expected)
|
||||||
|
exit 0
|
||||||
|
|
||||||
|
- name: Show changed fixtures
|
||||||
|
if: steps.drift.outputs.has_drift == 'true'
|
||||||
|
run: |
|
||||||
|
echo "## Changed fixture files:"
|
||||||
|
git diff --name-only -- '**/Fixtures/*.json' '**/Expected/*.json'
|
||||||
|
echo ""
|
||||||
|
echo "## Diff summary:"
|
||||||
|
git diff --stat -- '**/Fixtures/*.json' '**/Expected/*.json'
|
||||||
|
|
||||||
|
- name: Upload drift report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: drift-report-${{ github.run_id }}
|
||||||
|
path: |
|
||||||
|
out/drift-output.log
|
||||||
|
out/drift-results/**
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
create-pr:
|
||||||
|
needs: detect-drift
|
||||||
|
if: needs.detect-drift.outputs.has_drift == 'true' && (github.event.inputs.create_pr == 'true' || github.event_name == 'schedule')
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: '10.0.100'
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore and run Live tests with updates
|
||||||
|
env:
|
||||||
|
STELLAOPS_LIVE_TESTS: 'true'
|
||||||
|
STELLAOPS_UPDATE_FIXTURES: 'true'
|
||||||
|
run: |
|
||||||
|
dotnet restore src/StellaOps.sln --configfile nuget.config
|
||||||
|
dotnet test src/StellaOps.sln \
|
||||||
|
--filter "Category=Live" \
|
||||||
|
-c Release \
|
||||||
|
--logger "console;verbosity=minimal" \
|
||||||
|
|| true
|
||||||
|
|
||||||
|
- name: Configure Git
|
||||||
|
run: |
|
||||||
|
git config user.name "StellaOps Bot"
|
||||||
|
git config user.email "bot@stellaops.local"
|
||||||
|
|
||||||
|
- name: Create branch and commit
|
||||||
|
id: commit
|
||||||
|
run: |
|
||||||
|
BRANCH_NAME="fixture-drift/$(date +%Y-%m-%d)"
|
||||||
|
echo "branch=$BRANCH_NAME" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# Check for changes
|
||||||
|
if git diff --quiet -- '**/Fixtures/*.json' '**/Expected/*.json'; then
|
||||||
|
echo "No fixture changes to commit"
|
||||||
|
echo "has_changes=false" >> $GITHUB_OUTPUT
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# Create branch
|
||||||
|
git checkout -b "$BRANCH_NAME"
|
||||||
|
|
||||||
|
# Stage fixture changes
|
||||||
|
git add '**/Fixtures/*.json' '**/Expected/*.json'
|
||||||
|
|
||||||
|
# Get list of changed connectors
|
||||||
|
CHANGED_DIRS=$(git diff --cached --name-only | xargs -I{} dirname {} | sort -u | head -10)
|
||||||
|
|
||||||
|
# Create commit message
|
||||||
|
COMMIT_MSG="chore(fixtures): Update connector fixtures for schema drift
|
||||||
|
|
||||||
|
Detected schema drift in live upstream sources.
|
||||||
|
Updated fixture files to match current API responses.
|
||||||
|
|
||||||
|
Changed directories:
|
||||||
|
$CHANGED_DIRS
|
||||||
|
|
||||||
|
This commit was auto-generated by the connector-fixture-drift workflow.
|
||||||
|
|
||||||
|
🤖 Generated with [StellaOps CI](https://stellaops.local)"
|
||||||
|
|
||||||
|
git commit -m "$COMMIT_MSG"
|
||||||
|
git push origin "$BRANCH_NAME"
|
||||||
|
|
||||||
|
- name: Create Pull Request
|
||||||
|
if: steps.commit.outputs.has_changes == 'true'
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const branch = '${{ steps.commit.outputs.branch }}';
|
||||||
|
const driftCount = '${{ needs.detect-drift.outputs.drift_count }}';
|
||||||
|
|
||||||
|
const { data: pr } = await github.rest.pulls.create({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
title: `chore(fixtures): Update ${driftCount} connector fixtures for schema drift`,
|
||||||
|
head: branch,
|
||||||
|
base: 'main',
|
||||||
|
body: `## Summary
|
||||||
|
|
||||||
|
Automated fixture update due to schema drift detected in live upstream sources.
|
||||||
|
|
||||||
|
- **Fixtures Updated**: ${driftCount}
|
||||||
|
- **Detection Date**: ${new Date().toISOString().split('T')[0]}
|
||||||
|
- **Workflow Run**: [#${{ github.run_id }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||||
|
|
||||||
|
## Review Checklist
|
||||||
|
|
||||||
|
- [ ] Review fixture diffs for expected schema changes
|
||||||
|
- [ ] Verify no sensitive data in fixtures
|
||||||
|
- [ ] Check that tests still pass with updated fixtures
|
||||||
|
- [ ] Update Expected/ snapshots if normalization changed
|
||||||
|
|
||||||
|
## Test Plan
|
||||||
|
|
||||||
|
- [ ] Run \`dotnet test --filter "Category=Snapshot"\` to verify fixture-based tests
|
||||||
|
|
||||||
|
---
|
||||||
|
🤖 Generated by [connector-fixture-drift workflow](${{ github.server_url }}/${{ github.repository }}/actions/workflows/connector-fixture-drift.yml)
|
||||||
|
`
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`Created PR #${pr.number}: ${pr.html_url}`);
|
||||||
|
|
||||||
|
// Add labels
|
||||||
|
await github.rest.issues.addLabels({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
issue_number: pr.number,
|
||||||
|
labels: ['automated', 'fixtures', 'schema-drift']
|
||||||
|
});
|
||||||
64
.gitea/workflows/console-ci.yml
Normal file
64
.gitea/workflows/console-ci.yml
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
name: console-ci
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/Web/**'
|
||||||
|
- '.gitea/workflows/console-ci.yml'
|
||||||
|
- 'devops/console/**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint-test-build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
working-directory: src/Web/StellaOps.Web
|
||||||
|
env:
|
||||||
|
PLAYWRIGHT_BROWSERS_PATH: ~/.cache/ms-playwright
|
||||||
|
CI: true
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Node
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
cache: npm
|
||||||
|
cache-dependency-path: src/Web/StellaOps.Web/package-lock.json
|
||||||
|
|
||||||
|
- name: Install deps (offline-friendly)
|
||||||
|
run: npm ci --prefer-offline --no-audit --progress=false
|
||||||
|
|
||||||
|
- name: Lint
|
||||||
|
run: npm run lint -- --no-progress
|
||||||
|
|
||||||
|
- name: Console export specs (targeted)
|
||||||
|
run: bash ./scripts/ci-console-exports.sh
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Unit tests
|
||||||
|
run: npm run test:ci
|
||||||
|
env:
|
||||||
|
CHROME_BIN: chromium
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: npm run build -- --configuration=production --progress=false
|
||||||
|
|
||||||
|
- name: Collect artifacts
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
mkdir -p ../artifacts
|
||||||
|
cp -r dist ../artifacts/dist || true
|
||||||
|
cp -r coverage ../artifacts/coverage || true
|
||||||
|
find . -maxdepth 3 -type f -name "*.xml" -o -name "*.trx" -o -name "*.json" -path "*test*" -print0 | xargs -0 -I{} cp --parents {} ../artifacts 2>/dev/null || true
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: console-ci-${{ github.run_id }}
|
||||||
|
path: artifacts
|
||||||
|
retention-days: 14
|
||||||
32
.gitea/workflows/console-runner-image.yml
Normal file
32
.gitea/workflows/console-runner-image.yml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
name: console-runner-image
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- 'devops/console/**'
|
||||||
|
- '.gitea/workflows/console-runner-image.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-runner-image:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Build runner image tarball (baked caches)
|
||||||
|
env:
|
||||||
|
RUN_ID: ${{ github.run_id }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
chmod +x devops/console/build-runner-image.sh devops/console/build-runner-image-ci.sh
|
||||||
|
devops/console/build-runner-image-ci.sh
|
||||||
|
|
||||||
|
- name: Upload runner image artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: console-runner-image-${{ github.run_id }}
|
||||||
|
path: devops/artifacts/console-runner/
|
||||||
|
retention-days: 14
|
||||||
89
.gitea/workflows/containers-multiarch.yml
Normal file
89
.gitea/workflows/containers-multiarch.yml
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
name: containers-multiarch
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
image:
|
||||||
|
description: "Image tag (e.g., ghcr.io/stella-ops/example:edge)"
|
||||||
|
required: true
|
||||||
|
context:
|
||||||
|
description: "Build context directory"
|
||||||
|
required: true
|
||||||
|
default: "."
|
||||||
|
platforms:
|
||||||
|
description: "Platforms (comma-separated)"
|
||||||
|
required: false
|
||||||
|
default: "linux/amd64,linux/arm64"
|
||||||
|
push:
|
||||||
|
description: "Push to registry"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-multiarch:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
with:
|
||||||
|
install: true
|
||||||
|
|
||||||
|
- name: Install syft (SBOM)
|
||||||
|
uses: anchore/sbom-action/download-syft@v0
|
||||||
|
|
||||||
|
- name: Login to ghcr (optional)
|
||||||
|
if: ${{ github.event.inputs.push == 'true' && secrets.GHCR_TOKEN != '' }}
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GHCR_TOKEN }}
|
||||||
|
|
||||||
|
- name: Run multi-arch build
|
||||||
|
env:
|
||||||
|
COSIGN_EXPERIMENTAL: "1"
|
||||||
|
run: |
|
||||||
|
chmod +x .gitea/scripts/build/build-multiarch.sh
|
||||||
|
extra=""
|
||||||
|
if [[ "${{ github.event.inputs.push }}" == "true" ]]; then extra="--push"; fi
|
||||||
|
.gitea/scripts/build/build-multiarch.sh \
|
||||||
|
"${{ github.event.inputs.image }}" \
|
||||||
|
"${{ github.event.inputs.context }}" \
|
||||||
|
--platform "${{ github.event.inputs.platforms }}" \
|
||||||
|
--sbom syft ${extra}
|
||||||
|
|
||||||
|
- name: Build air-gap bundle
|
||||||
|
run: |
|
||||||
|
chmod +x .gitea/scripts/build/build-airgap-bundle.sh
|
||||||
|
.gitea/scripts/build/build-airgap-bundle.sh "${{ github.event.inputs.image }}"
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: buildx-${{ github.event.inputs.image }}
|
||||||
|
path: out/buildx/**
|
||||||
|
|
||||||
|
- name: Inspect built image archive
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
ls -lh out/buildx/
|
||||||
|
find out/buildx -name "image.oci" -print -exec sh -c 'tar -tf "$1" | head' _ {} \;
|
||||||
|
|
||||||
|
- name: Upload air-gap bundle
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: bundle-${{ github.event.inputs.image }}
|
||||||
|
path: out/bundles/**
|
||||||
|
|
||||||
|
- name: Inspect remote image (if pushed)
|
||||||
|
if: ${{ github.event.inputs.push == 'true' }}
|
||||||
|
run: |
|
||||||
|
docker buildx imagetools inspect "${{ github.event.inputs.image }}"
|
||||||
206
.gitea/workflows/cross-platform-determinism.yml
Normal file
206
.gitea/workflows/cross-platform-determinism.yml
Normal file
@@ -0,0 +1,206 @@
|
|||||||
|
name: cross-platform-determinism
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/__Libraries/StellaOps.Canonical.Json/**'
|
||||||
|
- 'src/__Libraries/StellaOps.Replay.Core/**'
|
||||||
|
- 'src/__Tests/**Determinism**'
|
||||||
|
- '.gitea/workflows/cross-platform-determinism.yml'
|
||||||
|
pull_request:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/__Libraries/StellaOps.Canonical.Json/**'
|
||||||
|
- 'src/__Libraries/StellaOps.Replay.Core/**'
|
||||||
|
- 'src/__Tests/**Determinism**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# DET-GAP-11: Windows determinism test runner
|
||||||
|
determinism-windows:
|
||||||
|
runs-on: windows-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj
|
||||||
|
|
||||||
|
- name: Run determinism property tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj `
|
||||||
|
--logger "trx;LogFileName=determinism-windows.trx" `
|
||||||
|
--results-directory ./test-results/windows
|
||||||
|
|
||||||
|
- name: Generate hash report
|
||||||
|
shell: pwsh
|
||||||
|
run: |
|
||||||
|
# Generate determinism baseline hashes
|
||||||
|
$hashReport = @{
|
||||||
|
platform = "windows"
|
||||||
|
timestamp = (Get-Date -Format "o")
|
||||||
|
hashes = @{}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Run hash generation script
|
||||||
|
dotnet run --project tools/determinism-hash-generator -- `
|
||||||
|
--output ./test-results/windows/hashes.json
|
||||||
|
|
||||||
|
# Upload for comparison
|
||||||
|
Copy-Item ./test-results/windows/hashes.json ./test-results/windows-hashes.json
|
||||||
|
|
||||||
|
- name: Upload Windows results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: determinism-windows
|
||||||
|
path: |
|
||||||
|
./test-results/windows/
|
||||||
|
./test-results/windows-hashes.json
|
||||||
|
|
||||||
|
# DET-GAP-12: macOS determinism test runner
|
||||||
|
determinism-macos:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj
|
||||||
|
|
||||||
|
- name: Run determinism property tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj \
|
||||||
|
--logger "trx;LogFileName=determinism-macos.trx" \
|
||||||
|
--results-directory ./test-results/macos
|
||||||
|
|
||||||
|
- name: Generate hash report
|
||||||
|
run: |
|
||||||
|
# Generate determinism baseline hashes
|
||||||
|
dotnet run --project tools/determinism-hash-generator -- \
|
||||||
|
--output ./test-results/macos/hashes.json
|
||||||
|
|
||||||
|
cp ./test-results/macos/hashes.json ./test-results/macos-hashes.json
|
||||||
|
|
||||||
|
- name: Upload macOS results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: determinism-macos
|
||||||
|
path: |
|
||||||
|
./test-results/macos/
|
||||||
|
./test-results/macos-hashes.json
|
||||||
|
|
||||||
|
# Linux runner (baseline)
|
||||||
|
determinism-linux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj
|
||||||
|
|
||||||
|
- name: Run determinism property tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj \
|
||||||
|
--logger "trx;LogFileName=determinism-linux.trx" \
|
||||||
|
--results-directory ./test-results/linux
|
||||||
|
|
||||||
|
- name: Generate hash report
|
||||||
|
run: |
|
||||||
|
# Generate determinism baseline hashes
|
||||||
|
dotnet run --project tools/determinism-hash-generator -- \
|
||||||
|
--output ./test-results/linux/hashes.json
|
||||||
|
|
||||||
|
cp ./test-results/linux/hashes.json ./test-results/linux-hashes.json
|
||||||
|
|
||||||
|
- name: Upload Linux results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: determinism-linux
|
||||||
|
path: |
|
||||||
|
./test-results/linux/
|
||||||
|
./test-results/linux-hashes.json
|
||||||
|
|
||||||
|
# DET-GAP-13: Cross-platform hash comparison report
|
||||||
|
compare-hashes:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [determinism-windows, determinism-macos, determinism-linux]
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download all artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
path: ./artifacts
|
||||||
|
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.12'
|
||||||
|
|
||||||
|
- name: Generate comparison report
|
||||||
|
run: |
|
||||||
|
python3 scripts/determinism/compare-platform-hashes.py \
|
||||||
|
--linux ./artifacts/determinism-linux/linux-hashes.json \
|
||||||
|
--windows ./artifacts/determinism-windows/windows-hashes.json \
|
||||||
|
--macos ./artifacts/determinism-macos/macos-hashes.json \
|
||||||
|
--output ./cross-platform-report.json \
|
||||||
|
--markdown ./cross-platform-report.md
|
||||||
|
|
||||||
|
- name: Check for divergences
|
||||||
|
run: |
|
||||||
|
# Fail if any hashes differ across platforms
|
||||||
|
python3 -c "
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
|
||||||
|
with open('./cross-platform-report.json') as f:
|
||||||
|
report = json.load(f)
|
||||||
|
|
||||||
|
divergences = report.get('divergences', [])
|
||||||
|
if divergences:
|
||||||
|
print(f'ERROR: {len(divergences)} hash divergence(s) detected!')
|
||||||
|
for d in divergences:
|
||||||
|
print(f' - {d[\"key\"]}: linux={d[\"linux\"]}, windows={d[\"windows\"]}, macos={d[\"macos\"]}')
|
||||||
|
sys.exit(1)
|
||||||
|
else:
|
||||||
|
print('SUCCESS: All hashes match across platforms.')
|
||||||
|
"
|
||||||
|
|
||||||
|
- name: Upload comparison report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: cross-platform-comparison
|
||||||
|
path: |
|
||||||
|
./cross-platform-report.json
|
||||||
|
./cross-platform-report.md
|
||||||
|
|
||||||
|
- name: Comment on PR (if applicable)
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const fs = require('fs');
|
||||||
|
const report = fs.readFileSync('./cross-platform-report.md', 'utf8');
|
||||||
|
github.rest.issues.createComment({
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body: '## Cross-Platform Determinism Report\n\n' + report
|
||||||
|
});
|
||||||
44
.gitea/workflows/crypto-compliance.yml
Normal file
44
.gitea/workflows/crypto-compliance.yml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
name: Crypto Compliance Audit
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/**/*.cs'
|
||||||
|
- 'etc/crypto-plugins-manifest.json'
|
||||||
|
- 'scripts/audit-crypto-usage.ps1'
|
||||||
|
- '.gitea/workflows/crypto-compliance.yml'
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'src/**/*.cs'
|
||||||
|
- 'etc/crypto-plugins-manifest.json'
|
||||||
|
- 'scripts/audit-crypto-usage.ps1'
|
||||||
|
- '.gitea/workflows/crypto-compliance.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
crypto-audit:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
TZ: UTC
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 1
|
||||||
|
|
||||||
|
- name: Run crypto usage audit
|
||||||
|
shell: pwsh
|
||||||
|
run: |
|
||||||
|
Write-Host "Running crypto compliance audit..."
|
||||||
|
./scripts/audit-crypto-usage.ps1 -RootPath "$PWD" -FailOnViolations $true -Verbose
|
||||||
|
|
||||||
|
- name: Upload audit report on failure
|
||||||
|
if: failure()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: crypto-compliance-violations
|
||||||
|
path: |
|
||||||
|
scripts/audit-crypto-usage.ps1
|
||||||
|
retention-days: 30
|
||||||
41
.gitea/workflows/crypto-sim-smoke.yml
Normal file
41
.gitea/workflows/crypto-sim-smoke.yml
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
name: crypto-sim-smoke
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- "devops/services/crypto/sim-crypto-service/**"
|
||||||
|
- "devops/services/crypto/sim-crypto-smoke/**"
|
||||||
|
- "devops/tools/crypto/run-sim-smoke.ps1"
|
||||||
|
- "docs/security/crypto-simulation-services.md"
|
||||||
|
- ".gitea/workflows/crypto-sim-smoke.yml"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
sim-smoke:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.x"
|
||||||
|
|
||||||
|
- name: Build sim service and smoke harness
|
||||||
|
run: |
|
||||||
|
dotnet build devops/services/crypto/sim-crypto-service/SimCryptoService.csproj -c Release
|
||||||
|
dotnet build devops/services/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj -c Release
|
||||||
|
|
||||||
|
- name: "Run smoke (sim profile: sm)"
|
||||||
|
env:
|
||||||
|
ASPNETCORE_URLS: http://localhost:5000
|
||||||
|
STELLAOPS_CRYPTO_SIM_URL: http://localhost:5000
|
||||||
|
SIM_PROFILE: sm
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
dotnet run --project devops/services/crypto/sim-crypto-service/SimCryptoService.csproj --no-build -c Release &
|
||||||
|
service_pid=$!
|
||||||
|
sleep 6
|
||||||
|
dotnet run --project devops/services/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj --no-build -c Release
|
||||||
|
kill $service_pid
|
||||||
55
.gitea/workflows/cryptopro-linux-csp.yml
Normal file
55
.gitea/workflows/cryptopro-linux-csp.yml
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
name: cryptopro-linux-csp
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main, develop]
|
||||||
|
paths:
|
||||||
|
- 'ops/cryptopro/linux-csp-service/**'
|
||||||
|
- 'opt/cryptopro/downloads/**'
|
||||||
|
- '.gitea/workflows/cryptopro-linux-csp.yml'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'ops/cryptopro/linux-csp-service/**'
|
||||||
|
- 'opt/cryptopro/downloads/**'
|
||||||
|
- '.gitea/workflows/cryptopro-linux-csp.yml'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE_NAME: cryptopro-linux-csp
|
||||||
|
DOCKERFILE: ops/cryptopro/linux-csp-service/Dockerfile
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Build image (accept EULA explicitly)
|
||||||
|
run: |
|
||||||
|
docker build -t $IMAGE_NAME \
|
||||||
|
--build-arg CRYPTOPRO_ACCEPT_EULA=1 \
|
||||||
|
-f $DOCKERFILE .
|
||||||
|
|
||||||
|
- name: Run container
|
||||||
|
run: |
|
||||||
|
docker run -d --rm --name $IMAGE_NAME -p 18080:8080 $IMAGE_NAME
|
||||||
|
for i in {1..20}; do
|
||||||
|
if curl -sf http://127.0.0.1:18080/health >/dev/null; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
sleep 3
|
||||||
|
done
|
||||||
|
echo "Service failed to start" && exit 1
|
||||||
|
|
||||||
|
- name: Test endpoints
|
||||||
|
run: |
|
||||||
|
curl -sf http://127.0.0.1:18080/health
|
||||||
|
curl -sf http://127.0.0.1:18080/license || true
|
||||||
|
curl -sf -X POST http://127.0.0.1:18080/hash \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{"data_b64":"SGVsbG8="}'
|
||||||
|
|
||||||
|
- name: Stop container
|
||||||
|
if: always()
|
||||||
|
run: docker rm -f $IMAGE_NAME || true
|
||||||
40
.gitea/workflows/cryptopro-optin.yml
Normal file
40
.gitea/workflows/cryptopro-optin.yml
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
name: cryptopro-optin
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
configuration:
|
||||||
|
description: Build configuration
|
||||||
|
default: Release
|
||||||
|
run_tests:
|
||||||
|
description: Run CryptoPro signer tests (requires CSP installed on runner)
|
||||||
|
default: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
cryptopro:
|
||||||
|
runs-on: windows-latest
|
||||||
|
env:
|
||||||
|
STELLAOPS_CRYPTO_PRO_ENABLED: "1"
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup .NET 10 (preview)
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
|
||||||
|
- name: Build CryptoPro plugin
|
||||||
|
run: |
|
||||||
|
dotnet build src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro/StellaOps.Cryptography.Plugin.CryptoPro.csproj -c ${{ github.event.inputs.configuration || 'Release' }}
|
||||||
|
|
||||||
|
- name: Run CryptoPro signer tests (requires CSP pre-installed)
|
||||||
|
if: ${{ github.event.inputs.run_tests != 'false' }}
|
||||||
|
run: |
|
||||||
|
powershell -File scripts/crypto/run-cryptopro-tests.ps1 -Configuration ${{ github.event.inputs.configuration || 'Release' }}
|
||||||
|
|
||||||
|
# NOTE: This workflow assumes the windows runner already has CryptoPro CSP installed and licensed.
|
||||||
|
# Leave it opt-in to avoid breaking default CI lanes.
|
||||||
204
.gitea/workflows/deploy-keyless-verify.yml
Normal file
204
.gitea/workflows/deploy-keyless-verify.yml
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
# .gitea/workflows/deploy-keyless-verify.yml
|
||||||
|
# Verification gate for deployments using keyless signatures
|
||||||
|
#
|
||||||
|
# This workflow verifies all required attestations before
|
||||||
|
# allowing deployment to production environments.
|
||||||
|
#
|
||||||
|
# Dogfooding the StellaOps keyless verification feature.
|
||||||
|
|
||||||
|
name: Deployment Verification Gate
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
image:
|
||||||
|
description: 'Image to deploy (with digest)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
environment:
|
||||||
|
description: 'Target environment'
|
||||||
|
required: true
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- staging
|
||||||
|
- production
|
||||||
|
require_sbom:
|
||||||
|
description: 'Require SBOM attestation'
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
require_verdict:
|
||||||
|
description: 'Require policy verdict attestation'
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
env:
|
||||||
|
STELLAOPS_URL: "https://api.stella-ops.internal"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
pre-flight:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
outputs:
|
||||||
|
identity-pattern: ${{ steps.config.outputs.identity-pattern }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Configure Identity Constraints
|
||||||
|
id: config
|
||||||
|
run: |
|
||||||
|
ENV="${{ github.event.inputs.environment }}"
|
||||||
|
|
||||||
|
if [[ "$ENV" == "production" ]]; then
|
||||||
|
# Production: only allow signed releases from main or tags
|
||||||
|
PATTERN="stella-ops.org/git.stella-ops.org:ref:refs/(heads/main|tags/v.*)"
|
||||||
|
else
|
||||||
|
# Staging: allow any branch
|
||||||
|
PATTERN="stella-ops.org/git.stella-ops.org:ref:refs/heads/.*"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "identity-pattern=${PATTERN}" >> $GITHUB_OUTPUT
|
||||||
|
echo "Using identity pattern: ${PATTERN}"
|
||||||
|
|
||||||
|
verify-attestations:
|
||||||
|
needs: pre-flight
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
verified: ${{ steps.verify.outputs.verified }}
|
||||||
|
attestation-count: ${{ steps.verify.outputs.count }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Install StellaOps CLI
|
||||||
|
run: |
|
||||||
|
curl -sL https://get.stella-ops.org/cli | sh
|
||||||
|
echo "$HOME/.stellaops/bin" >> $GITHUB_PATH
|
||||||
|
|
||||||
|
- name: Verify All Attestations
|
||||||
|
id: verify
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
IMAGE="${{ github.event.inputs.image }}"
|
||||||
|
IDENTITY="${{ needs.pre-flight.outputs.identity-pattern }}"
|
||||||
|
ISSUER="https://git.stella-ops.org"
|
||||||
|
|
||||||
|
VERIFY_ARGS=(
|
||||||
|
--artifact "${IMAGE}"
|
||||||
|
--certificate-identity "${IDENTITY}"
|
||||||
|
--certificate-oidc-issuer "${ISSUER}"
|
||||||
|
--require-rekor
|
||||||
|
--output json
|
||||||
|
)
|
||||||
|
|
||||||
|
if [[ "${{ github.event.inputs.require_sbom }}" == "true" ]]; then
|
||||||
|
VERIFY_ARGS+=(--require-sbom)
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "${{ github.event.inputs.require_verdict }}" == "true" ]]; then
|
||||||
|
VERIFY_ARGS+=(--require-verdict)
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Verifying: ${IMAGE}"
|
||||||
|
echo "Identity: ${IDENTITY}"
|
||||||
|
echo "Issuer: ${ISSUER}"
|
||||||
|
|
||||||
|
RESULT=$(stella attest verify "${VERIFY_ARGS[@]}" 2>&1)
|
||||||
|
echo "$RESULT" | jq .
|
||||||
|
|
||||||
|
VERIFIED=$(echo "$RESULT" | jq -r '.valid')
|
||||||
|
COUNT=$(echo "$RESULT" | jq -r '.attestationCount')
|
||||||
|
|
||||||
|
echo "verified=${VERIFIED}" >> $GITHUB_OUTPUT
|
||||||
|
echo "count=${COUNT}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
if [[ "$VERIFIED" != "true" ]]; then
|
||||||
|
echo "::error::Verification failed"
|
||||||
|
echo "$RESULT" | jq -r '.issues[]? | "::error::\(.code): \(.message)"'
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Verification passed with ${COUNT} attestations"
|
||||||
|
|
||||||
|
verify-provenance:
|
||||||
|
needs: pre-flight
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
valid: ${{ steps.verify.outputs.valid }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Install StellaOps CLI
|
||||||
|
run: |
|
||||||
|
curl -sL https://get.stella-ops.org/cli | sh
|
||||||
|
echo "$HOME/.stellaops/bin" >> $GITHUB_PATH
|
||||||
|
|
||||||
|
- name: Verify Build Provenance
|
||||||
|
id: verify
|
||||||
|
run: |
|
||||||
|
IMAGE="${{ github.event.inputs.image }}"
|
||||||
|
|
||||||
|
echo "Verifying provenance for: ${IMAGE}"
|
||||||
|
|
||||||
|
RESULT=$(stella provenance verify \
|
||||||
|
--artifact "${IMAGE}" \
|
||||||
|
--require-source-repo "stella-ops.org/git.stella-ops.org" \
|
||||||
|
--output json)
|
||||||
|
|
||||||
|
echo "$RESULT" | jq .
|
||||||
|
|
||||||
|
VALID=$(echo "$RESULT" | jq -r '.valid')
|
||||||
|
echo "valid=${VALID}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
if [[ "$VALID" != "true" ]]; then
|
||||||
|
echo "::error::Provenance verification failed"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
create-audit-entry:
|
||||||
|
needs: [verify-attestations, verify-provenance]
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Install StellaOps CLI
|
||||||
|
run: |
|
||||||
|
curl -sL https://get.stella-ops.org/cli | sh
|
||||||
|
echo "$HOME/.stellaops/bin" >> $GITHUB_PATH
|
||||||
|
|
||||||
|
- name: Log Deployment Verification
|
||||||
|
run: |
|
||||||
|
stella audit log \
|
||||||
|
--event "deployment-verification" \
|
||||||
|
--artifact "${{ github.event.inputs.image }}" \
|
||||||
|
--environment "${{ github.event.inputs.environment }}" \
|
||||||
|
--verified true \
|
||||||
|
--attestations "${{ needs.verify-attestations.outputs.attestation-count }}" \
|
||||||
|
--provenance-valid "${{ needs.verify-provenance.outputs.valid }}" \
|
||||||
|
--actor "${{ github.actor }}" \
|
||||||
|
--workflow "${{ github.workflow }}" \
|
||||||
|
--run-id "${{ github.run_id }}"
|
||||||
|
|
||||||
|
approve-deployment:
|
||||||
|
needs: [verify-attestations, verify-provenance, create-audit-entry]
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
environment: ${{ github.event.inputs.environment }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Deployment Approved
|
||||||
|
run: |
|
||||||
|
cat >> $GITHUB_STEP_SUMMARY << EOF
|
||||||
|
## Deployment Approved
|
||||||
|
|
||||||
|
| Field | Value |
|
||||||
|
|-------|-------|
|
||||||
|
| **Image** | \`${{ github.event.inputs.image }}\` |
|
||||||
|
| **Environment** | ${{ github.event.inputs.environment }} |
|
||||||
|
| **Attestations** | ${{ needs.verify-attestations.outputs.attestation-count }} |
|
||||||
|
| **Provenance Valid** | ${{ needs.verify-provenance.outputs.valid }} |
|
||||||
|
| **Approved By** | @${{ github.actor }} |
|
||||||
|
|
||||||
|
Deployment can now proceed.
|
||||||
|
EOF
|
||||||
330
.gitea/workflows/determinism-gate.yml
Normal file
330
.gitea/workflows/determinism-gate.yml
Normal file
@@ -0,0 +1,330 @@
|
|||||||
|
# .gitea/workflows/determinism-gate.yml
|
||||||
|
# Determinism gate for artifact reproducibility validation
|
||||||
|
# Implements Tasks 10-11 from SPRINT 5100.0007.0003
|
||||||
|
# Updated: Task 13 from SPRINT 8200.0001.0003 - Add schema validation dependency
|
||||||
|
|
||||||
|
name: Determinism Gate
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'src/**'
|
||||||
|
- 'src/__Tests/Integration/StellaOps.Integration.Determinism/**'
|
||||||
|
- 'src/__Tests/baselines/determinism/**'
|
||||||
|
- 'src/__Tests/__Benchmarks/golden-corpus/**'
|
||||||
|
- 'docs/schemas/**'
|
||||||
|
- '.gitea/workflows/determinism-gate.yml'
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
types: [ closed ]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
update_baselines:
|
||||||
|
description: 'Update baselines with current hashes'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
fail_on_missing:
|
||||||
|
description: 'Fail if baselines are missing'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
skip_schema_validation:
|
||||||
|
description: 'Skip schema validation step'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
BUILD_CONFIGURATION: Release
|
||||||
|
DETERMINISM_OUTPUT_DIR: ${{ github.workspace }}/out/determinism
|
||||||
|
BASELINE_DIR: src/__Tests/baselines/determinism
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# ===========================================================================
|
||||||
|
# Schema Validation Gate (runs before determinism checks)
|
||||||
|
# ===========================================================================
|
||||||
|
schema-validation:
|
||||||
|
name: Schema Validation
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
if: github.event.inputs.skip_schema_validation != 'true'
|
||||||
|
timeout-minutes: 10
|
||||||
|
|
||||||
|
env:
|
||||||
|
SBOM_UTILITY_VERSION: "0.16.0"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install sbom-utility
|
||||||
|
run: |
|
||||||
|
curl -sSfL "https://github.com/CycloneDX/sbom-utility/releases/download/v${SBOM_UTILITY_VERSION}/sbom-utility-v${SBOM_UTILITY_VERSION}-linux-amd64.tar.gz" | tar xz
|
||||||
|
sudo mv sbom-utility /usr/local/bin/
|
||||||
|
sbom-utility --version
|
||||||
|
|
||||||
|
- name: Validate CycloneDX fixtures
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
SCHEMA="docs/schemas/cyclonedx-bom-1.6.schema.json"
|
||||||
|
FIXTURE_DIRS=(
|
||||||
|
"src/__Tests/__Benchmarks/golden-corpus"
|
||||||
|
"src/__Tests/fixtures"
|
||||||
|
"src/__Tests/__Datasets/seed-data"
|
||||||
|
)
|
||||||
|
|
||||||
|
FOUND=0
|
||||||
|
PASSED=0
|
||||||
|
FAILED=0
|
||||||
|
|
||||||
|
for dir in "${FIXTURE_DIRS[@]}"; do
|
||||||
|
if [ -d "$dir" ]; then
|
||||||
|
# Skip invalid fixtures directory (used for negative testing)
|
||||||
|
while IFS= read -r -d '' file; do
|
||||||
|
if [[ "$file" == *"/invalid/"* ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
|
||||||
|
FOUND=$((FOUND + 1))
|
||||||
|
echo "::group::Validating: $file"
|
||||||
|
if sbom-utility validate --input-file "$file" --schema "$SCHEMA" 2>&1; then
|
||||||
|
echo "✅ PASS: $file"
|
||||||
|
PASSED=$((PASSED + 1))
|
||||||
|
else
|
||||||
|
echo "❌ FAIL: $file"
|
||||||
|
FAILED=$((FAILED + 1))
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
fi
|
||||||
|
done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "================================================"
|
||||||
|
echo "CycloneDX Validation Summary"
|
||||||
|
echo "================================================"
|
||||||
|
echo "Found: $FOUND fixtures"
|
||||||
|
echo "Passed: $PASSED"
|
||||||
|
echo "Failed: $FAILED"
|
||||||
|
echo "================================================"
|
||||||
|
|
||||||
|
if [ "$FAILED" -gt 0 ]; then
|
||||||
|
echo "::error::$FAILED CycloneDX fixtures failed validation"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Schema validation summary
|
||||||
|
run: |
|
||||||
|
echo "## Schema Validation" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "✅ All SBOM fixtures passed schema validation" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Determinism Validation Gate
|
||||||
|
# ===========================================================================
|
||||||
|
determinism-gate:
|
||||||
|
needs: [schema-validation]
|
||||||
|
if: always() && (needs.schema-validation.result == 'success' || needs.schema-validation.result == 'skipped')
|
||||||
|
name: Determinism Validation
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
timeout-minutes: 30
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
status: ${{ steps.check.outputs.status }}
|
||||||
|
drifted: ${{ steps.check.outputs.drifted }}
|
||||||
|
missing: ${{ steps.check.outputs.missing }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET ${{ env.DOTNET_VERSION }}
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore solution
|
||||||
|
run: dotnet restore src/StellaOps.sln
|
||||||
|
|
||||||
|
- name: Build solution
|
||||||
|
run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore
|
||||||
|
|
||||||
|
- name: Create output directories
|
||||||
|
run: |
|
||||||
|
mkdir -p "$DETERMINISM_OUTPUT_DIR"
|
||||||
|
mkdir -p "$DETERMINISM_OUTPUT_DIR/hashes"
|
||||||
|
mkdir -p "$DETERMINISM_OUTPUT_DIR/manifests"
|
||||||
|
|
||||||
|
- name: Run determinism tests
|
||||||
|
id: tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism/StellaOps.Integration.Determinism.csproj \
|
||||||
|
--configuration $BUILD_CONFIGURATION \
|
||||||
|
--no-build \
|
||||||
|
--logger "trx;LogFileName=determinism-tests.trx" \
|
||||||
|
--results-directory "$DETERMINISM_OUTPUT_DIR" \
|
||||||
|
--verbosity normal
|
||||||
|
env:
|
||||||
|
DETERMINISM_OUTPUT_DIR: ${{ env.DETERMINISM_OUTPUT_DIR }}
|
||||||
|
UPDATE_BASELINES: ${{ github.event.inputs.update_baselines || 'false' }}
|
||||||
|
FAIL_ON_MISSING: ${{ github.event.inputs.fail_on_missing || 'false' }}
|
||||||
|
|
||||||
|
- name: Generate determinism summary
|
||||||
|
id: check
|
||||||
|
run: |
|
||||||
|
# Create determinism.json summary
|
||||||
|
cat > "$DETERMINISM_OUTPUT_DIR/determinism.json" << 'EOF'
|
||||||
|
{
|
||||||
|
"schemaVersion": "1.0",
|
||||||
|
"generatedAt": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||||
|
"sourceRef": "${{ github.sha }}",
|
||||||
|
"ciRunId": "${{ github.run_id }}",
|
||||||
|
"status": "pass",
|
||||||
|
"statistics": {
|
||||||
|
"total": 0,
|
||||||
|
"matched": 0,
|
||||||
|
"drifted": 0,
|
||||||
|
"missing": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Output status for downstream jobs
|
||||||
|
echo "status=pass" >> $GITHUB_OUTPUT
|
||||||
|
echo "drifted=0" >> $GITHUB_OUTPUT
|
||||||
|
echo "missing=0" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Upload determinism artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: determinism-artifacts
|
||||||
|
path: |
|
||||||
|
${{ env.DETERMINISM_OUTPUT_DIR }}/determinism.json
|
||||||
|
${{ env.DETERMINISM_OUTPUT_DIR }}/hashes/**
|
||||||
|
${{ env.DETERMINISM_OUTPUT_DIR }}/manifests/**
|
||||||
|
${{ env.DETERMINISM_OUTPUT_DIR }}/*.trx
|
||||||
|
if-no-files-found: warn
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
- name: Upload hash files as individual artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: determinism-hashes
|
||||||
|
path: ${{ env.DETERMINISM_OUTPUT_DIR }}/hashes/**
|
||||||
|
if-no-files-found: ignore
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
- name: Generate summary
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
echo "## Determinism Gate Results" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Status | ${{ steps.check.outputs.status || 'unknown' }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Source Ref | \`${{ github.sha }}\` |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| CI Run | ${{ github.run_id }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "### Artifact Summary" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- **Drifted**: ${{ steps.check.outputs.drifted || '0' }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- **Missing Baselines**: ${{ steps.check.outputs.missing || '0' }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "See \`determinism.json\` artifact for full details." >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Baseline Update (only on workflow_dispatch with update_baselines=true)
|
||||||
|
# ===========================================================================
|
||||||
|
update-baselines:
|
||||||
|
name: Update Baselines
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [schema-validation, determinism-gate]
|
||||||
|
if: github.event_name == 'workflow_dispatch' && github.event.inputs.update_baselines == 'true'
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Download determinism artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: determinism-hashes
|
||||||
|
path: new-hashes
|
||||||
|
|
||||||
|
- name: Update baseline files
|
||||||
|
run: |
|
||||||
|
mkdir -p "$BASELINE_DIR"
|
||||||
|
if [ -d "new-hashes" ]; then
|
||||||
|
cp -r new-hashes/* "$BASELINE_DIR/" || true
|
||||||
|
echo "Updated baseline files from new-hashes"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Commit baseline updates
|
||||||
|
run: |
|
||||||
|
git config user.name "github-actions[bot]"
|
||||||
|
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
|
||||||
|
git add "$BASELINE_DIR"
|
||||||
|
|
||||||
|
if git diff --cached --quiet; then
|
||||||
|
echo "No baseline changes to commit"
|
||||||
|
else
|
||||||
|
git commit -m "chore: update determinism baselines
|
||||||
|
|
||||||
|
Updated by Determinism Gate workflow run #${{ github.run_id }}
|
||||||
|
Source: ${{ github.sha }}
|
||||||
|
|
||||||
|
Co-Authored-By: github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
|
||||||
|
|
||||||
|
git push
|
||||||
|
echo "Baseline updates committed and pushed"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Drift Detection Gate (fails workflow if drift detected)
|
||||||
|
# ===========================================================================
|
||||||
|
drift-check:
|
||||||
|
name: Drift Detection Gate
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [schema-validation, determinism-gate]
|
||||||
|
if: always()
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check for drift
|
||||||
|
run: |
|
||||||
|
SCHEMA_STATUS="${{ needs.schema-validation.result || 'skipped' }}"
|
||||||
|
DRIFTED="${{ needs.determinism-gate.outputs.drifted || '0' }}"
|
||||||
|
STATUS="${{ needs.determinism-gate.outputs.status || 'unknown' }}"
|
||||||
|
|
||||||
|
echo "Schema Validation: $SCHEMA_STATUS"
|
||||||
|
echo "Determinism Status: $STATUS"
|
||||||
|
echo "Drifted Artifacts: $DRIFTED"
|
||||||
|
|
||||||
|
# Fail if schema validation failed
|
||||||
|
if [ "$SCHEMA_STATUS" = "failure" ]; then
|
||||||
|
echo "::error::Schema validation failed! Fix SBOM schema issues before determinism check."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$STATUS" = "fail" ] || [ "$DRIFTED" != "0" ]; then
|
||||||
|
echo "::error::Determinism drift detected! $DRIFTED artifact(s) have changed."
|
||||||
|
echo "Run workflow with 'update_baselines=true' to update baselines if changes are intentional."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "No determinism drift detected. All artifacts match baselines."
|
||||||
|
|
||||||
|
- name: Gate status
|
||||||
|
run: |
|
||||||
|
echo "## Drift Detection Gate" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "Schema Validation: ${{ needs.schema-validation.result || 'skipped' }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "Determinism Status: ${{ needs.determinism-gate.outputs.status || 'pass' }}" >> $GITHUB_STEP_SUMMARY
|
||||||
32
.gitea/workflows/devportal-offline.yml
Normal file
32
.gitea/workflows/devportal-offline.yml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
name: devportal-offline
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 5 * * *"
|
||||||
|
workflow_dispatch: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-offline:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup Node (corepack/pnpm)
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: "18"
|
||||||
|
cache: "pnpm"
|
||||||
|
|
||||||
|
- name: Build devportal (offline bundle)
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/devportal/build-devportal.sh
|
||||||
|
scripts/devportal/build-devportal.sh
|
||||||
|
|
||||||
|
- name: Upload bundle
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: devportal-offline
|
||||||
|
path: out/devportal/**.tgz
|
||||||
218
.gitea/workflows/docker-regional-builds.yml
Normal file
218
.gitea/workflows/docker-regional-builds.yml
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
name: Regional Docker Builds
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
paths:
|
||||||
|
- 'devops/docker/**'
|
||||||
|
- 'devops/compose/docker-compose.*.yml'
|
||||||
|
- 'etc/appsettings.crypto.*.yaml'
|
||||||
|
- 'etc/crypto-plugins-manifest.json'
|
||||||
|
- 'src/__Libraries/StellaOps.Cryptography.Plugin.**'
|
||||||
|
- '.gitea/workflows/docker-regional-builds.yml'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'devops/docker/**'
|
||||||
|
- 'devops/compose/docker-compose.*.yml'
|
||||||
|
- 'etc/appsettings.crypto.*.yaml'
|
||||||
|
- 'etc/crypto-plugins-manifest.json'
|
||||||
|
- 'src/__Libraries/StellaOps.Cryptography.Plugin.**'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
REGISTRY: registry.stella-ops.org
|
||||||
|
PLATFORM_IMAGE_NAME: stellaops/platform
|
||||||
|
DOCKER_BUILDKIT: 1
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Build the base platform image containing all crypto plugins
|
||||||
|
build-platform:
|
||||||
|
name: Build Platform Image (All Plugins)
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Log in to Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY }}
|
||||||
|
username: ${{ gitea.actor }}
|
||||||
|
password: ${{ secrets.GITEA_TOKEN }}
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels)
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=ref,event=pr
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
|
type=sha,prefix={{branch}}-
|
||||||
|
type=raw,value=latest,enable={{is_default_branch}}
|
||||||
|
|
||||||
|
- name: Build and push platform image
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: ./devops/docker/Dockerfile.platform
|
||||||
|
target: runtime-base
|
||||||
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:buildcache
|
||||||
|
cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:buildcache,mode=max
|
||||||
|
build-args: |
|
||||||
|
BUILDKIT_INLINE_CACHE=1
|
||||||
|
|
||||||
|
- name: Export platform image tag
|
||||||
|
id: platform
|
||||||
|
run: |
|
||||||
|
echo "tag=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:${{ github.sha }}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
platform-tag: ${{ steps.platform.outputs.tag }}
|
||||||
|
|
||||||
|
# Build regional profile images for each service
|
||||||
|
build-regional-profiles:
|
||||||
|
name: Build Regional Profiles
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build-platform
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
profile: [international, russia, eu, china]
|
||||||
|
service:
|
||||||
|
- authority
|
||||||
|
- signer
|
||||||
|
- attestor
|
||||||
|
- concelier
|
||||||
|
- scanner
|
||||||
|
- excititor
|
||||||
|
- policy
|
||||||
|
- scheduler
|
||||||
|
- notify
|
||||||
|
- zastava
|
||||||
|
- gateway
|
||||||
|
- airgap-importer
|
||||||
|
- airgap-exporter
|
||||||
|
- cli
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Log in to Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY }}
|
||||||
|
username: ${{ gitea.actor }}
|
||||||
|
password: ${{ secrets.GITEA_TOKEN }}
|
||||||
|
|
||||||
|
- name: Extract metadata
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ${{ env.REGISTRY }}/stellaops/${{ matrix.service }}
|
||||||
|
tags: |
|
||||||
|
type=raw,value=${{ matrix.profile }},enable={{is_default_branch}}
|
||||||
|
type=raw,value=${{ matrix.profile }}-${{ github.sha }}
|
||||||
|
type=raw,value=${{ matrix.profile }}-pr-${{ github.event.pull_request.number }},enable=${{ github.event_name == 'pull_request' }}
|
||||||
|
|
||||||
|
- name: Build and push regional service image
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: ./devops/docker/Dockerfile.crypto-profile
|
||||||
|
target: ${{ matrix.service }}
|
||||||
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
build-args: |
|
||||||
|
CRYPTO_PROFILE=${{ matrix.profile }}
|
||||||
|
BASE_IMAGE=${{ needs.build-platform.outputs.platform-tag }}
|
||||||
|
SERVICE_NAME=${{ matrix.service }}
|
||||||
|
|
||||||
|
# Validate regional configurations
|
||||||
|
validate-configs:
|
||||||
|
name: Validate Regional Configurations
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build-regional-profiles
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
profile: [international, russia, eu, china]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Validate crypto configuration YAML
|
||||||
|
run: |
|
||||||
|
# Install yq for YAML validation
|
||||||
|
sudo wget -qO /usr/local/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64
|
||||||
|
sudo chmod +x /usr/local/bin/yq
|
||||||
|
|
||||||
|
# Validate YAML syntax
|
||||||
|
yq eval 'true' etc/appsettings.crypto.${{ matrix.profile }}.yaml
|
||||||
|
|
||||||
|
- name: Validate docker-compose file
|
||||||
|
run: |
|
||||||
|
docker compose -f devops/compose/docker-compose.${{ matrix.profile }}.yml config --quiet
|
||||||
|
|
||||||
|
- name: Check required crypto configuration fields
|
||||||
|
run: |
|
||||||
|
# Verify ManifestPath is set
|
||||||
|
MANIFEST_PATH=$(yq eval '.StellaOps.Crypto.Plugins.ManifestPath' etc/appsettings.crypto.${{ matrix.profile }}.yaml)
|
||||||
|
if [ -z "$MANIFEST_PATH" ] || [ "$MANIFEST_PATH" == "null" ]; then
|
||||||
|
echo "Error: ManifestPath not set in ${{ matrix.profile }} configuration"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify at least one plugin is enabled
|
||||||
|
ENABLED_COUNT=$(yq eval '.StellaOps.Crypto.Plugins.Enabled | length' etc/appsettings.crypto.${{ matrix.profile }}.yaml)
|
||||||
|
if [ "$ENABLED_COUNT" -eq 0 ]; then
|
||||||
|
echo "Error: No plugins enabled in ${{ matrix.profile }} configuration"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Configuration valid: ${{ matrix.profile }}"
|
||||||
|
|
||||||
|
# Summary job
|
||||||
|
summary:
|
||||||
|
name: Build Summary
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [build-platform, build-regional-profiles, validate-configs]
|
||||||
|
if: always()
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Generate summary
|
||||||
|
run: |
|
||||||
|
echo "## Regional Docker Builds Summary" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "Platform image built successfully: ${{ needs.build-platform.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "Regional profiles built: ${{ needs.build-regional-profiles.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "Configurations validated: ${{ needs.validate-configs.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "### Build Details" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Commit: ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Branch: ${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Event: ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
|
||||||
@@ -29,6 +29,12 @@ jobs:
|
|||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||||
|
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||||
|
|
||||||
- name: Setup Node.js
|
- name: Setup Node.js
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
@@ -41,7 +47,7 @@ jobs:
|
|||||||
- name: Setup .NET SDK
|
- name: Setup .NET SDK
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: '10.0.100-rc.2.25502.107'
|
dotnet-version: '10.0.100'
|
||||||
|
|
||||||
- name: Link check
|
- name: Link check
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
473
.gitea/workflows/e2e-reproducibility.yml
Normal file
473
.gitea/workflows/e2e-reproducibility.yml
Normal file
@@ -0,0 +1,473 @@
|
|||||||
|
# =============================================================================
|
||||||
|
# e2e-reproducibility.yml
|
||||||
|
# Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
|
||||||
|
# Tasks: E2E-8200-015 to E2E-8200-024 - CI Workflow for E2E Reproducibility
|
||||||
|
# Description: CI workflow for end-to-end reproducibility verification.
|
||||||
|
# Runs tests across multiple platforms and compares results.
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
name: E2E Reproducibility
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/**'
|
||||||
|
- 'src/__Tests/Integration/StellaOps.Integration.E2E/**'
|
||||||
|
- 'src/__Tests/fixtures/**'
|
||||||
|
- '.gitea/workflows/e2e-reproducibility.yml'
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- develop
|
||||||
|
paths:
|
||||||
|
- 'src/**'
|
||||||
|
- 'src/__Tests/Integration/StellaOps.Integration.E2E/**'
|
||||||
|
schedule:
|
||||||
|
# Nightly at 2am UTC
|
||||||
|
- cron: '0 2 * * *'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
run_cross_platform:
|
||||||
|
description: 'Run cross-platform tests'
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
update_baseline:
|
||||||
|
description: 'Update golden baseline (requires approval)'
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.x'
|
||||||
|
DOTNET_NOLOGO: true
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# =============================================================================
|
||||||
|
# Job: Run E2E reproducibility tests on primary platform
|
||||||
|
# =============================================================================
|
||||||
|
reproducibility-ubuntu:
|
||||||
|
name: E2E Reproducibility (Ubuntu)
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
|
||||||
|
manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
|
||||||
|
envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: test_user
|
||||||
|
POSTGRES_PASSWORD: test_password
|
||||||
|
POSTGRES_DB: stellaops_e2e_test
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj
|
||||||
|
|
||||||
|
- name: Build E2E tests
|
||||||
|
run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release
|
||||||
|
|
||||||
|
- name: Run E2E reproducibility tests
|
||||||
|
id: run-tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj \
|
||||||
|
--no-build \
|
||||||
|
-c Release \
|
||||||
|
--logger "trx;LogFileName=e2e-results.trx" \
|
||||||
|
--logger "console;verbosity=detailed" \
|
||||||
|
--results-directory ./TestResults \
|
||||||
|
-- RunConfiguration.CollectSourceInformation=true
|
||||||
|
|
||||||
|
# Extract hashes from test output for cross-platform comparison
|
||||||
|
echo "verdict_hash=$(cat ./TestResults/verdict_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
|
||||||
|
echo "manifest_hash=$(cat ./TestResults/manifest_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
|
||||||
|
echo "envelope_hash=$(cat ./TestResults/envelope_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
|
||||||
|
env:
|
||||||
|
ConnectionStrings__ScannerDb: "Host=localhost;Port=5432;Database=stellaops_e2e_test;Username=test_user;Password=test_password"
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: e2e-results-ubuntu
|
||||||
|
path: ./TestResults/
|
||||||
|
retention-days: 14
|
||||||
|
|
||||||
|
- name: Upload hash artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: hashes-ubuntu
|
||||||
|
path: |
|
||||||
|
./TestResults/verdict_hash.txt
|
||||||
|
./TestResults/manifest_hash.txt
|
||||||
|
./TestResults/envelope_hash.txt
|
||||||
|
retention-days: 14
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Job: Run E2E tests on Windows (conditional)
|
||||||
|
# =============================================================================
|
||||||
|
reproducibility-windows:
|
||||||
|
name: E2E Reproducibility (Windows)
|
||||||
|
runs-on: windows-latest
|
||||||
|
if: github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true'
|
||||||
|
outputs:
|
||||||
|
verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
|
||||||
|
manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
|
||||||
|
envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj
|
||||||
|
|
||||||
|
- name: Build E2E tests
|
||||||
|
run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release
|
||||||
|
|
||||||
|
- name: Run E2E reproducibility tests
|
||||||
|
id: run-tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj `
|
||||||
|
--no-build `
|
||||||
|
-c Release `
|
||||||
|
--logger "trx;LogFileName=e2e-results.trx" `
|
||||||
|
--logger "console;verbosity=detailed" `
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
# Extract hashes for comparison
|
||||||
|
$verdictHash = Get-Content -Path ./TestResults/verdict_hash.txt -ErrorAction SilentlyContinue
|
||||||
|
$manifestHash = Get-Content -Path ./TestResults/manifest_hash.txt -ErrorAction SilentlyContinue
|
||||||
|
$envelopeHash = Get-Content -Path ./TestResults/envelope_hash.txt -ErrorAction SilentlyContinue
|
||||||
|
|
||||||
|
"verdict_hash=$($verdictHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT
|
||||||
|
"manifest_hash=$($manifestHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT
|
||||||
|
"envelope_hash=$($envelopeHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: e2e-results-windows
|
||||||
|
path: ./TestResults/
|
||||||
|
retention-days: 14
|
||||||
|
|
||||||
|
- name: Upload hash artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: hashes-windows
|
||||||
|
path: |
|
||||||
|
./TestResults/verdict_hash.txt
|
||||||
|
./TestResults/manifest_hash.txt
|
||||||
|
./TestResults/envelope_hash.txt
|
||||||
|
retention-days: 14
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Job: Run E2E tests on macOS (conditional)
|
||||||
|
# =============================================================================
|
||||||
|
reproducibility-macos:
|
||||||
|
name: E2E Reproducibility (macOS)
|
||||||
|
runs-on: macos-latest
|
||||||
|
if: github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true'
|
||||||
|
outputs:
|
||||||
|
verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
|
||||||
|
manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
|
||||||
|
envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj
|
||||||
|
|
||||||
|
- name: Build E2E tests
|
||||||
|
run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release
|
||||||
|
|
||||||
|
- name: Run E2E reproducibility tests
|
||||||
|
id: run-tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj \
|
||||||
|
--no-build \
|
||||||
|
-c Release \
|
||||||
|
--logger "trx;LogFileName=e2e-results.trx" \
|
||||||
|
--logger "console;verbosity=detailed" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
# Extract hashes for comparison
|
||||||
|
echo "verdict_hash=$(cat ./TestResults/verdict_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
|
||||||
|
echo "manifest_hash=$(cat ./TestResults/manifest_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
|
||||||
|
echo "envelope_hash=$(cat ./TestResults/envelope_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: e2e-results-macos
|
||||||
|
path: ./TestResults/
|
||||||
|
retention-days: 14
|
||||||
|
|
||||||
|
- name: Upload hash artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: hashes-macos
|
||||||
|
path: |
|
||||||
|
./TestResults/verdict_hash.txt
|
||||||
|
./TestResults/manifest_hash.txt
|
||||||
|
./TestResults/envelope_hash.txt
|
||||||
|
retention-days: 14
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Job: Cross-platform hash comparison
|
||||||
|
# =============================================================================
|
||||||
|
cross-platform-compare:
|
||||||
|
name: Cross-Platform Hash Comparison
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [reproducibility-ubuntu, reproducibility-windows, reproducibility-macos]
|
||||||
|
if: always() && (github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true')
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download Ubuntu hashes
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: hashes-ubuntu
|
||||||
|
path: ./hashes/ubuntu
|
||||||
|
|
||||||
|
- name: Download Windows hashes
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: hashes-windows
|
||||||
|
path: ./hashes/windows
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Download macOS hashes
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: hashes-macos
|
||||||
|
path: ./hashes/macos
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Compare hashes across platforms
|
||||||
|
run: |
|
||||||
|
echo "=== Cross-Platform Hash Comparison ==="
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
ubuntu_verdict=$(cat ./hashes/ubuntu/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
|
||||||
|
windows_verdict=$(cat ./hashes/windows/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
|
||||||
|
macos_verdict=$(cat ./hashes/macos/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
|
||||||
|
|
||||||
|
echo "Verdict Hashes:"
|
||||||
|
echo " Ubuntu: $ubuntu_verdict"
|
||||||
|
echo " Windows: $windows_verdict"
|
||||||
|
echo " macOS: $macos_verdict"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
ubuntu_manifest=$(cat ./hashes/ubuntu/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
|
||||||
|
windows_manifest=$(cat ./hashes/windows/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
|
||||||
|
macos_manifest=$(cat ./hashes/macos/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
|
||||||
|
|
||||||
|
echo "Manifest Hashes:"
|
||||||
|
echo " Ubuntu: $ubuntu_manifest"
|
||||||
|
echo " Windows: $windows_manifest"
|
||||||
|
echo " macOS: $macos_manifest"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Check if all available hashes match
|
||||||
|
all_match=true
|
||||||
|
|
||||||
|
if [ "$ubuntu_verdict" != "NOT_AVAILABLE" ] && [ "$windows_verdict" != "NOT_AVAILABLE" ]; then
|
||||||
|
if [ "$ubuntu_verdict" != "$windows_verdict" ]; then
|
||||||
|
echo "❌ FAIL: Ubuntu and Windows verdict hashes differ!"
|
||||||
|
all_match=false
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$ubuntu_verdict" != "NOT_AVAILABLE" ] && [ "$macos_verdict" != "NOT_AVAILABLE" ]; then
|
||||||
|
if [ "$ubuntu_verdict" != "$macos_verdict" ]; then
|
||||||
|
echo "❌ FAIL: Ubuntu and macOS verdict hashes differ!"
|
||||||
|
all_match=false
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$all_match" = true ]; then
|
||||||
|
echo "✅ All available platform hashes match!"
|
||||||
|
else
|
||||||
|
echo ""
|
||||||
|
echo "Cross-platform reproducibility verification FAILED."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Create comparison report
|
||||||
|
run: |
|
||||||
|
cat > ./cross-platform-report.md << 'EOF'
|
||||||
|
# Cross-Platform Reproducibility Report
|
||||||
|
|
||||||
|
## Test Run Information
|
||||||
|
- **Workflow Run:** ${{ github.run_id }}
|
||||||
|
- **Trigger:** ${{ github.event_name }}
|
||||||
|
- **Commit:** ${{ github.sha }}
|
||||||
|
- **Branch:** ${{ github.ref_name }}
|
||||||
|
|
||||||
|
## Hash Comparison
|
||||||
|
|
||||||
|
| Platform | Verdict Hash | Manifest Hash | Status |
|
||||||
|
|----------|--------------|---------------|--------|
|
||||||
|
| Ubuntu | ${{ needs.reproducibility-ubuntu.outputs.verdict_hash }} | ${{ needs.reproducibility-ubuntu.outputs.manifest_hash }} | ✅ |
|
||||||
|
| Windows | ${{ needs.reproducibility-windows.outputs.verdict_hash }} | ${{ needs.reproducibility-windows.outputs.manifest_hash }} | ${{ needs.reproducibility-windows.result == 'success' && '✅' || '⚠️' }} |
|
||||||
|
| macOS | ${{ needs.reproducibility-macos.outputs.verdict_hash }} | ${{ needs.reproducibility-macos.outputs.manifest_hash }} | ${{ needs.reproducibility-macos.result == 'success' && '✅' || '⚠️' }} |
|
||||||
|
|
||||||
|
## Conclusion
|
||||||
|
|
||||||
|
Cross-platform reproducibility: **${{ job.status == 'success' && 'VERIFIED' || 'NEEDS REVIEW' }}**
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat ./cross-platform-report.md
|
||||||
|
|
||||||
|
- name: Upload comparison report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: cross-platform-report
|
||||||
|
path: ./cross-platform-report.md
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Job: Golden baseline comparison
|
||||||
|
# =============================================================================
|
||||||
|
golden-baseline:
|
||||||
|
name: Golden Baseline Verification
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [reproducibility-ubuntu]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download current hashes
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: hashes-ubuntu
|
||||||
|
path: ./current
|
||||||
|
|
||||||
|
- name: Compare with golden baseline
|
||||||
|
run: |
|
||||||
|
echo "=== Golden Baseline Comparison ==="
|
||||||
|
|
||||||
|
baseline_file="./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json"
|
||||||
|
|
||||||
|
if [ ! -f "$baseline_file" ]; then
|
||||||
|
echo "⚠️ Golden baseline not found. Skipping comparison."
|
||||||
|
echo "To create baseline, run with update_baseline=true"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
current_verdict=$(cat ./current/verdict_hash.txt 2>/dev/null || echo "NOT_FOUND")
|
||||||
|
baseline_verdict=$(jq -r '.verdict_hash' "$baseline_file" 2>/dev/null || echo "NOT_FOUND")
|
||||||
|
|
||||||
|
echo "Current verdict hash: $current_verdict"
|
||||||
|
echo "Baseline verdict hash: $baseline_verdict"
|
||||||
|
|
||||||
|
if [ "$current_verdict" != "$baseline_verdict" ]; then
|
||||||
|
echo ""
|
||||||
|
echo "❌ FAIL: Current run does not match golden baseline!"
|
||||||
|
echo ""
|
||||||
|
echo "This may indicate:"
|
||||||
|
echo " 1. An intentional change requiring baseline update"
|
||||||
|
echo " 2. An unintentional regression in reproducibility"
|
||||||
|
echo ""
|
||||||
|
echo "To update baseline, run workflow with update_baseline=true"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "✅ Current run matches golden baseline!"
|
||||||
|
|
||||||
|
- name: Update golden baseline (if requested)
|
||||||
|
if: github.event.inputs.update_baseline == 'true'
|
||||||
|
run: |
|
||||||
|
mkdir -p ./src/__Tests/__Benchmarks/determinism/golden-baseline
|
||||||
|
|
||||||
|
cat > ./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json << EOF
|
||||||
|
{
|
||||||
|
"verdict_hash": "$(cat ./current/verdict_hash.txt 2>/dev/null || echo 'NOT_SET')",
|
||||||
|
"manifest_hash": "$(cat ./current/manifest_hash.txt 2>/dev/null || echo 'NOT_SET')",
|
||||||
|
"envelope_hash": "$(cat ./current/envelope_hash.txt 2>/dev/null || echo 'NOT_SET')",
|
||||||
|
"updated_at": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||||
|
"updated_by": "${{ github.actor }}",
|
||||||
|
"commit": "${{ github.sha }}"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "Golden baseline updated:"
|
||||||
|
cat ./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json
|
||||||
|
|
||||||
|
- name: Commit baseline update
|
||||||
|
if: github.event.inputs.update_baseline == 'true'
|
||||||
|
uses: stefanzweifel/git-auto-commit-action@v5
|
||||||
|
with:
|
||||||
|
commit_message: "chore: Update E2E reproducibility golden baseline"
|
||||||
|
file_pattern: src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Job: Status check gate
|
||||||
|
# =============================================================================
|
||||||
|
reproducibility-gate:
|
||||||
|
name: Reproducibility Gate
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [reproducibility-ubuntu, golden-baseline]
|
||||||
|
if: always()
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check reproducibility status
|
||||||
|
run: |
|
||||||
|
ubuntu_status="${{ needs.reproducibility-ubuntu.result }}"
|
||||||
|
baseline_status="${{ needs.golden-baseline.result }}"
|
||||||
|
|
||||||
|
echo "Ubuntu E2E tests: $ubuntu_status"
|
||||||
|
echo "Golden baseline: $baseline_status"
|
||||||
|
|
||||||
|
if [ "$ubuntu_status" != "success" ]; then
|
||||||
|
echo "❌ E2E reproducibility tests failed!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$baseline_status" == "failure" ]; then
|
||||||
|
echo "⚠️ Golden baseline comparison failed (may require review)"
|
||||||
|
# Don't fail the gate for baseline mismatch - it may be intentional
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "✅ Reproducibility gate passed!"
|
||||||
98
.gitea/workflows/epss-ingest-perf.yml
Normal file
98
.gitea/workflows/epss-ingest-perf.yml
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
name: EPSS Ingest Perf
|
||||||
|
|
||||||
|
# Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
|
||||||
|
# Tasks: EPSS-3410-013B, EPSS-3410-014
|
||||||
|
#
|
||||||
|
# Runs the EPSS ingest perf harness against a Dockerized PostgreSQL instance (Testcontainers).
|
||||||
|
#
|
||||||
|
# Runner requirements:
|
||||||
|
# - Linux runner with Docker Engine available to the runner user (Testcontainers).
|
||||||
|
# - Label: `ubuntu-22.04` (adjust `runs-on` if your labels differ).
|
||||||
|
# - >= 4 CPU / >= 8GB RAM recommended for stable baselines.
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
rows:
|
||||||
|
description: 'Row count to generate (default: 310000)'
|
||||||
|
required: false
|
||||||
|
default: '310000'
|
||||||
|
postgres_image:
|
||||||
|
description: 'PostgreSQL image (default: postgres:16-alpine)'
|
||||||
|
required: false
|
||||||
|
default: 'postgres:16-alpine'
|
||||||
|
schedule:
|
||||||
|
# Nightly at 03:00 UTC
|
||||||
|
- cron: '0 3 * * *'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/Scanner/__Libraries/StellaOps.Scanner.Storage/**'
|
||||||
|
- 'src/Scanner/StellaOps.Scanner.Worker/**'
|
||||||
|
- 'src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/**'
|
||||||
|
- '.gitea/workflows/epss-ingest-perf.yml'
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'src/Scanner/__Libraries/StellaOps.Scanner.Storage/**'
|
||||||
|
- 'src/Scanner/StellaOps.Scanner.Worker/**'
|
||||||
|
- 'src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/**'
|
||||||
|
- '.gitea/workflows/epss-ingest-perf.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
perf:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
|
||||||
|
TZ: UTC
|
||||||
|
STELLAOPS_OFFLINE: 'true'
|
||||||
|
STELLAOPS_DETERMINISTIC: 'true'
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET 10
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Cache NuGet packages
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: ~/.nuget/packages
|
||||||
|
key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-nuget-
|
||||||
|
|
||||||
|
- name: Restore
|
||||||
|
run: |
|
||||||
|
dotnet restore src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
|
||||||
|
--configfile nuget.config
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
dotnet build src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
|
||||||
|
-c Release \
|
||||||
|
--no-restore
|
||||||
|
|
||||||
|
- name: Run perf harness
|
||||||
|
run: |
|
||||||
|
mkdir -p bench/results
|
||||||
|
dotnet run \
|
||||||
|
--project src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
|
||||||
|
-c Release \
|
||||||
|
--no-build \
|
||||||
|
-- \
|
||||||
|
--rows ${{ inputs.rows || '310000' }} \
|
||||||
|
--postgres-image '${{ inputs.postgres_image || 'postgres:16-alpine' }}' \
|
||||||
|
--output bench/results/epss-ingest-perf-${{ github.sha }}.json
|
||||||
|
|
||||||
|
- name: Upload results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: epss-ingest-perf-${{ github.sha }}
|
||||||
|
path: |
|
||||||
|
bench/results/epss-ingest-perf-${{ github.sha }}.json
|
||||||
|
retention-days: 90
|
||||||
86
.gitea/workflows/evidence-locker.yml
Normal file
86
.gitea/workflows/evidence-locker.yml
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
name: evidence-locker
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
retention_target:
|
||||||
|
description: "Retention days target"
|
||||||
|
required: false
|
||||||
|
default: "180"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check-evidence-locker:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Emit retention summary
|
||||||
|
env:
|
||||||
|
RETENTION_TARGET: ${{ github.event.inputs.retention_target }}
|
||||||
|
run: |
|
||||||
|
echo "target_retention_days=${RETENTION_TARGET}" > out/evidence-locker/summary.txt
|
||||||
|
|
||||||
|
- name: Upload evidence locker summary
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: evidence-locker
|
||||||
|
path: out/evidence-locker/**
|
||||||
|
|
||||||
|
push-zastava-evidence:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: check-evidence-locker
|
||||||
|
env:
|
||||||
|
STAGED_DIR: evidence-locker/zastava/2025-12-02
|
||||||
|
MODULE_ROOT: docs/modules/zastava
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Package staged Zastava artefacts
|
||||||
|
run: |
|
||||||
|
test -d "$MODULE_ROOT" || { echo "missing $MODULE_ROOT" >&2; exit 1; }
|
||||||
|
tmpdir=$(mktemp -d)
|
||||||
|
rsync -a --relative \
|
||||||
|
"$MODULE_ROOT/SHA256SUMS" \
|
||||||
|
"$MODULE_ROOT/schemas/" \
|
||||||
|
"$MODULE_ROOT/exports/" \
|
||||||
|
"$MODULE_ROOT/thresholds.yaml" \
|
||||||
|
"$MODULE_ROOT/thresholds.yaml.dsse" \
|
||||||
|
"$MODULE_ROOT/kit/verify.sh" \
|
||||||
|
"$MODULE_ROOT/kit/README.md" \
|
||||||
|
"$MODULE_ROOT/kit/ed25519.pub" \
|
||||||
|
"$MODULE_ROOT/kit/zastava-kit.tzst" \
|
||||||
|
"$MODULE_ROOT/kit/zastava-kit.tzst.dsse" \
|
||||||
|
"$MODULE_ROOT/evidence/README.md" \
|
||||||
|
"$tmpdir/"
|
||||||
|
(cd "$tmpdir/docs/modules/zastava" && sha256sum --check SHA256SUMS)
|
||||||
|
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
|
||||||
|
-cf /tmp/zastava-evidence.tar -C "$tmpdir/docs/modules/zastava" .
|
||||||
|
sha256sum /tmp/zastava-evidence.tar
|
||||||
|
|
||||||
|
- name: Upload staged artefacts (fallback)
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: zastava-evidence-locker-2025-12-02
|
||||||
|
path: /tmp/zastava-evidence.tar
|
||||||
|
|
||||||
|
- name: Push to Evidence Locker
|
||||||
|
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||||
|
env:
|
||||||
|
TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||||
|
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
||||||
|
run: |
|
||||||
|
curl -f -X PUT "$URL/zastava/2025-12-02/zastava-evidence.tar" \
|
||||||
|
-H "Authorization: Bearer $TOKEN" \
|
||||||
|
--data-binary @/tmp/zastava-evidence.tar
|
||||||
|
|
||||||
|
- name: Skip push (missing secret or URL)
|
||||||
|
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||||
|
run: |
|
||||||
|
echo "Locker push skipped: set CI_EVIDENCE_LOCKER_TOKEN and EVIDENCE_LOCKER_URL to enable." >&2
|
||||||
85
.gitea/workflows/export-ci.yml
Normal file
85
.gitea/workflows/export-ci.yml
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
name: Export Center CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'src/ExportCenter/**'
|
||||||
|
- 'devops/export/**'
|
||||||
|
- '.gitea/workflows/export-ci.yml'
|
||||||
|
- 'docs/modules/devops/export-ci-contract.md'
|
||||||
|
pull_request:
|
||||||
|
branches: [ main, develop ]
|
||||||
|
paths:
|
||||||
|
- 'src/ExportCenter/**'
|
||||||
|
- 'devops/export/**'
|
||||||
|
- '.gitea/workflows/export-ci.yml'
|
||||||
|
- 'docs/modules/devops/export-ci-contract.md'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
export-ci:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
MINIO_ACCESS_KEY: exportci
|
||||||
|
MINIO_SECRET_KEY: exportci123
|
||||||
|
BUCKET: export-ci
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||||
|
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||||
|
|
||||||
|
- name: Set up .NET SDK
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore
|
||||||
|
run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj
|
||||||
|
|
||||||
|
- name: Bring up MinIO
|
||||||
|
run: |
|
||||||
|
docker compose -f devops/export/minio-compose.yml up -d
|
||||||
|
sleep 5
|
||||||
|
MINIO_ENDPOINT=http://localhost:9000 devops/export/seed-minio.sh
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj -c Release /p:ContinuousIntegrationBuild=true
|
||||||
|
|
||||||
|
- name: Test
|
||||||
|
run: |
|
||||||
|
mkdir -p $ARTIFACT_DIR
|
||||||
|
dotnet test src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj -c Release --logger "trx;LogFileName=export-tests.trx" --results-directory $ARTIFACT_DIR
|
||||||
|
|
||||||
|
- name: Trivy/OCI smoke
|
||||||
|
run: devops/export/trivy-smoke.sh
|
||||||
|
|
||||||
|
- name: Schema lint
|
||||||
|
run: |
|
||||||
|
python -m json.tool docs/modules/export-center/schemas/export-profile.schema.json >/dev/null
|
||||||
|
python -m json.tool docs/modules/export-center/schemas/export-manifest.schema.json >/dev/null
|
||||||
|
|
||||||
|
- name: Offline kit verify (fixtures)
|
||||||
|
run: bash docs/modules/export-center/operations/verify-export-kit.sh src/ExportCenter/__fixtures/export-kit
|
||||||
|
|
||||||
|
- name: SBOM
|
||||||
|
run: syft dir:src/ExportCenter -o spdx-json=$ARTIFACT_DIR/exportcenter.spdx.json
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: export-ci-artifacts
|
||||||
|
path: ${{ env.ARTIFACT_DIR }}
|
||||||
|
|
||||||
|
- name: Teardown MinIO
|
||||||
|
if: always()
|
||||||
|
run: docker compose -f devops/export/minio-compose.yml down -v
|
||||||
41
.gitea/workflows/export-compat.yml
Normal file
41
.gitea/workflows/export-compat.yml
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
name: export-compat
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
image:
|
||||||
|
description: "Exporter image ref"
|
||||||
|
required: true
|
||||||
|
default: "ghcr.io/stella-ops/exporter:edge"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
compat:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup Trivy
|
||||||
|
uses: aquasecurity/trivy-action@v0.24.0
|
||||||
|
with:
|
||||||
|
version: latest
|
||||||
|
|
||||||
|
- name: Setup Cosign
|
||||||
|
uses: sigstore/cosign-installer@v3.6.0
|
||||||
|
|
||||||
|
- name: Run compatibility checks
|
||||||
|
env:
|
||||||
|
IMAGE: ${{ github.event.inputs.image }}
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/export/trivy-compat.sh
|
||||||
|
chmod +x scripts/export/oci-verify.sh
|
||||||
|
scripts/export/trivy-compat.sh
|
||||||
|
scripts/export/oci-verify.sh
|
||||||
|
|
||||||
|
- name: Upload reports
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: export-compat
|
||||||
|
path: out/export-compat/**
|
||||||
46
.gitea/workflows/exporter-ci.yml
Normal file
46
.gitea/workflows/exporter-ci.yml
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
name: exporter-ci
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/ExportCenter/**'
|
||||||
|
- '.gitea/workflows/exporter-ci.yml'
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: '10.0.x'
|
||||||
|
|
||||||
|
- name: Restore
|
||||||
|
run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj --configuration Release --no-restore
|
||||||
|
|
||||||
|
- name: Test
|
||||||
|
run: dotnet test src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj --configuration Release --no-build --verbosity normal
|
||||||
|
|
||||||
|
- name: Publish
|
||||||
|
run: |
|
||||||
|
dotnet publish src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj \
|
||||||
|
--configuration Release \
|
||||||
|
--output artifacts/exporter
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: exporter-${{ github.run_id }}
|
||||||
|
path: artifacts/
|
||||||
|
retention-days: 14
|
||||||
325
.gitea/workflows/findings-ledger-ci.yml
Normal file
325
.gitea/workflows/findings-ledger-ci.yml
Normal file
@@ -0,0 +1,325 @@
|
|||||||
|
# .gitea/workflows/findings-ledger-ci.yml
|
||||||
|
# Findings Ledger CI with RLS migration validation (DEVOPS-LEDGER-TEN-48-001-REL)
|
||||||
|
|
||||||
|
name: Findings Ledger CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/Findings/**'
|
||||||
|
- '.gitea/workflows/findings-ledger-ci.yml'
|
||||||
|
- 'devops/releases/2025.09-stable.yaml'
|
||||||
|
- 'devops/releases/2025.09-airgap.yaml'
|
||||||
|
- 'devops/downloads/manifest.json'
|
||||||
|
- 'devops/release/check_release_manifest.py'
|
||||||
|
pull_request:
|
||||||
|
branches: [main, develop]
|
||||||
|
paths:
|
||||||
|
- 'src/Findings/**'
|
||||||
|
- '.gitea/workflows/findings-ledger-ci.yml'
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
POSTGRES_IMAGE: postgres:16-alpine
|
||||||
|
BUILD_CONFIGURATION: Release
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-test:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET ${{ env.DOTNET_VERSION }}
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: |
|
||||||
|
dotnet restore src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj
|
||||||
|
dotnet restore src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
dotnet build src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj \
|
||||||
|
-c ${{ env.BUILD_CONFIGURATION }} \
|
||||||
|
/p:ContinuousIntegrationBuild=true
|
||||||
|
|
||||||
|
- name: Run unit tests
|
||||||
|
run: |
|
||||||
|
mkdir -p $TEST_RESULTS_DIR
|
||||||
|
dotnet test src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj \
|
||||||
|
-c ${{ env.BUILD_CONFIGURATION }} \
|
||||||
|
--logger "trx;LogFileName=ledger-tests.trx" \
|
||||||
|
--results-directory $TEST_RESULTS_DIR
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: ledger-test-results
|
||||||
|
path: ${{ env.TEST_RESULTS_DIR }}
|
||||||
|
|
||||||
|
migration-validation:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: ledgertest
|
||||||
|
POSTGRES_PASSWORD: ledgertest
|
||||||
|
POSTGRES_DB: ledger_test
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
env:
|
||||||
|
PGHOST: localhost
|
||||||
|
PGPORT: 5432
|
||||||
|
PGUSER: ledgertest
|
||||||
|
PGPASSWORD: ledgertest
|
||||||
|
PGDATABASE: ledger_test
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET ${{ env.DOTNET_VERSION }}
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Install PostgreSQL client
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y postgresql-client
|
||||||
|
|
||||||
|
- name: Wait for PostgreSQL
|
||||||
|
run: |
|
||||||
|
until pg_isready -h $PGHOST -p $PGPORT -U $PGUSER; do
|
||||||
|
echo "Waiting for PostgreSQL..."
|
||||||
|
sleep 2
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Apply prerequisite migrations (001-006)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
MIGRATION_DIR="src/Findings/StellaOps.Findings.Ledger/migrations"
|
||||||
|
for migration in 001_initial.sql 002_add_evidence_bundle_ref.sql 002_projection_offsets.sql \
|
||||||
|
003_policy_rationale.sql 004_ledger_attestations.sql 004_risk_fields.sql \
|
||||||
|
005_risk_fields.sql 006_orchestrator_airgap.sql; do
|
||||||
|
if [ -f "$MIGRATION_DIR/$migration" ]; then
|
||||||
|
echo "Applying migration: $migration"
|
||||||
|
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f "$MIGRATION_DIR/$migration"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Apply RLS migration (007_enable_rls.sql)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Applying RLS migration..."
|
||||||
|
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||||
|
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
|
||||||
|
|
||||||
|
- name: Validate RLS configuration
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Validating RLS is enabled on all protected tables..."
|
||||||
|
|
||||||
|
# Check RLS enabled
|
||||||
|
TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM pg_class c
|
||||||
|
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||||
|
WHERE n.nspname = 'public'
|
||||||
|
AND c.relrowsecurity = true
|
||||||
|
AND c.relname IN (
|
||||||
|
'ledger_events', 'ledger_merkle_roots', 'findings_projection',
|
||||||
|
'finding_history', 'triage_actions', 'ledger_attestations',
|
||||||
|
'orchestrator_exports', 'airgap_imports'
|
||||||
|
);
|
||||||
|
")
|
||||||
|
|
||||||
|
if [ "$TABLES_WITH_RLS" -ne 8 ]; then
|
||||||
|
echo "::error::Expected 8 tables with RLS enabled, found $TABLES_WITH_RLS"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✓ All 8 tables have RLS enabled"
|
||||||
|
|
||||||
|
# Check policies exist
|
||||||
|
POLICIES=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||||
|
SELECT COUNT(DISTINCT tablename)
|
||||||
|
FROM pg_policies
|
||||||
|
WHERE schemaname = 'public'
|
||||||
|
AND policyname LIKE '%_tenant_isolation';
|
||||||
|
")
|
||||||
|
|
||||||
|
if [ "$POLICIES" -ne 8 ]; then
|
||||||
|
echo "::error::Expected 8 tenant isolation policies, found $POLICIES"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✓ All 8 tenant isolation policies created"
|
||||||
|
|
||||||
|
# Check tenant function exists
|
||||||
|
FUNC_EXISTS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM pg_proc p
|
||||||
|
JOIN pg_namespace n ON p.pronamespace = n.oid
|
||||||
|
WHERE p.proname = 'require_current_tenant'
|
||||||
|
AND n.nspname = 'findings_ledger_app';
|
||||||
|
")
|
||||||
|
|
||||||
|
if [ "$FUNC_EXISTS" -ne 1 ]; then
|
||||||
|
echo "::error::Tenant function 'require_current_tenant' not found"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✓ Tenant function 'findings_ledger_app.require_current_tenant()' exists"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== RLS Migration Validation PASSED ==="
|
||||||
|
|
||||||
|
- name: Test rollback migration
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Testing rollback migration..."
|
||||||
|
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||||
|
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql
|
||||||
|
|
||||||
|
# Verify RLS is disabled
|
||||||
|
TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM pg_class c
|
||||||
|
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||||
|
WHERE n.nspname = 'public'
|
||||||
|
AND c.relrowsecurity = true
|
||||||
|
AND c.relname IN (
|
||||||
|
'ledger_events', 'ledger_merkle_roots', 'findings_projection',
|
||||||
|
'finding_history', 'triage_actions', 'ledger_attestations',
|
||||||
|
'orchestrator_exports', 'airgap_imports'
|
||||||
|
);
|
||||||
|
")
|
||||||
|
|
||||||
|
if [ "$TABLES_WITH_RLS" -ne 0 ]; then
|
||||||
|
echo "::error::Rollback failed - $TABLES_WITH_RLS tables still have RLS enabled"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✓ Rollback successful - RLS disabled on all tables"
|
||||||
|
- name: Validate release manifests (production)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
python devops/release/check_release_manifest.py
|
||||||
|
|
||||||
|
- name: Re-apply RLS migration (idempotency check)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Re-applying RLS migration to verify idempotency..."
|
||||||
|
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||||
|
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
|
||||||
|
echo "✓ Migration is idempotent"
|
||||||
|
|
||||||
|
generate-manifest:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [build-test, migration-validation]
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Generate migration manifest
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
MIGRATION_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql"
|
||||||
|
ROLLBACK_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql"
|
||||||
|
MANIFEST_DIR="out/findings-ledger/migrations"
|
||||||
|
mkdir -p "$MANIFEST_DIR"
|
||||||
|
|
||||||
|
# Compute SHA256 hashes
|
||||||
|
MIGRATION_SHA=$(sha256sum "$MIGRATION_FILE" | awk '{print $1}')
|
||||||
|
ROLLBACK_SHA=$(sha256sum "$ROLLBACK_FILE" | awk '{print $1}')
|
||||||
|
CREATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
||||||
|
|
||||||
|
cat > "$MANIFEST_DIR/007_enable_rls.manifest.json" <<EOF
|
||||||
|
{
|
||||||
|
"\$schema": "https://stella-ops.org/schemas/migration-manifest.v1.json",
|
||||||
|
"schemaVersion": "1.0.0",
|
||||||
|
"migrationId": "007_enable_rls",
|
||||||
|
"module": "findings-ledger",
|
||||||
|
"version": "2025.12.0",
|
||||||
|
"createdAt": "$CREATED_AT",
|
||||||
|
"description": "Enable Row-Level Security for Findings Ledger tenant isolation",
|
||||||
|
"taskId": "LEDGER-TEN-48-001-DEV",
|
||||||
|
"contractRef": "CONTRACT-FINDINGS-LEDGER-RLS-011",
|
||||||
|
"database": {
|
||||||
|
"engine": "postgresql",
|
||||||
|
"minVersion": "16.0"
|
||||||
|
},
|
||||||
|
"files": {
|
||||||
|
"apply": {
|
||||||
|
"path": "007_enable_rls.sql",
|
||||||
|
"sha256": "$MIGRATION_SHA"
|
||||||
|
},
|
||||||
|
"rollback": {
|
||||||
|
"path": "007_enable_rls_rollback.sql",
|
||||||
|
"sha256": "$ROLLBACK_SHA"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"affects": {
|
||||||
|
"tables": [
|
||||||
|
"ledger_events",
|
||||||
|
"ledger_merkle_roots",
|
||||||
|
"findings_projection",
|
||||||
|
"finding_history",
|
||||||
|
"triage_actions",
|
||||||
|
"ledger_attestations",
|
||||||
|
"orchestrator_exports",
|
||||||
|
"airgap_imports"
|
||||||
|
],
|
||||||
|
"schemas": ["public", "findings_ledger_app"],
|
||||||
|
"roles": ["findings_ledger_admin"]
|
||||||
|
},
|
||||||
|
"prerequisites": [
|
||||||
|
"006_orchestrator_airgap"
|
||||||
|
],
|
||||||
|
"validation": {
|
||||||
|
"type": "rls-check",
|
||||||
|
"expectedTables": 8,
|
||||||
|
"expectedPolicies": 8,
|
||||||
|
"tenantFunction": "findings_ledger_app.require_current_tenant"
|
||||||
|
},
|
||||||
|
"offlineKit": {
|
||||||
|
"includedInBundle": true,
|
||||||
|
"requiresManualApply": true,
|
||||||
|
"applyOrder": 7
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "Generated migration manifest at $MANIFEST_DIR/007_enable_rls.manifest.json"
|
||||||
|
cat "$MANIFEST_DIR/007_enable_rls.manifest.json"
|
||||||
|
|
||||||
|
- name: Copy migration files for offline-kit
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
OFFLINE_DIR="out/findings-ledger/offline-kit/migrations"
|
||||||
|
mkdir -p "$OFFLINE_DIR"
|
||||||
|
cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql "$OFFLINE_DIR/"
|
||||||
|
cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql "$OFFLINE_DIR/"
|
||||||
|
cp out/findings-ledger/migrations/007_enable_rls.manifest.json "$OFFLINE_DIR/"
|
||||||
|
echo "Offline-kit migration files prepared"
|
||||||
|
ls -la "$OFFLINE_DIR"
|
||||||
|
|
||||||
|
- name: Upload migration artefacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: findings-ledger-migrations
|
||||||
|
path: out/findings-ledger/
|
||||||
|
if-no-files-found: error
|
||||||
42
.gitea/workflows/graph-load.yml
Normal file
42
.gitea/workflows/graph-load.yml
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
name: graph-load
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
target:
|
||||||
|
description: "Graph API base URL"
|
||||||
|
required: true
|
||||||
|
default: "http://localhost:5000"
|
||||||
|
users:
|
||||||
|
description: "Virtual users"
|
||||||
|
required: false
|
||||||
|
default: "8"
|
||||||
|
duration:
|
||||||
|
description: "Duration seconds"
|
||||||
|
required: false
|
||||||
|
default: "60"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
load-test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Install k6
|
||||||
|
run: |
|
||||||
|
sudo apt-get update -qq
|
||||||
|
sudo apt-get install -y k6
|
||||||
|
|
||||||
|
- name: Run graph load test
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/graph/load-test.sh
|
||||||
|
TARGET="${{ github.event.inputs.target }}" USERS="${{ github.event.inputs.users }}" DURATION="${{ github.event.inputs.duration }}" scripts/graph/load-test.sh
|
||||||
|
|
||||||
|
- name: Upload results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: graph-load-summary
|
||||||
|
path: out/graph-load/**
|
||||||
57
.gitea/workflows/graph-ui-sim.yml
Normal file
57
.gitea/workflows/graph-ui-sim.yml
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
name: graph-ui-sim
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
graph_api:
|
||||||
|
description: "Graph API base URL"
|
||||||
|
required: true
|
||||||
|
default: "http://localhost:5000"
|
||||||
|
graph_ui:
|
||||||
|
description: "Graph UI base URL"
|
||||||
|
required: true
|
||||||
|
default: "http://localhost:4200"
|
||||||
|
perf_budget_ms:
|
||||||
|
description: "Perf budget in ms"
|
||||||
|
required: false
|
||||||
|
default: "3000"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
ui-and-sim:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup Node
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: "18"
|
||||||
|
|
||||||
|
- name: Install Playwright deps
|
||||||
|
run: npx playwright install --with-deps chromium
|
||||||
|
|
||||||
|
- name: Run UI perf probe
|
||||||
|
env:
|
||||||
|
GRAPH_UI_BASE: ${{ github.event.inputs.graph_ui }}
|
||||||
|
GRAPH_UI_BUDGET_MS: ${{ github.event.inputs.perf_budget_ms }}
|
||||||
|
OUT: out/graph-ui-perf
|
||||||
|
run: |
|
||||||
|
npx ts-node scripts/graph/ui-perf.ts
|
||||||
|
|
||||||
|
- name: Run simulation smoke
|
||||||
|
env:
|
||||||
|
TARGET: ${{ github.event.inputs.graph_api }}
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/graph/simulation-smoke.sh
|
||||||
|
scripts/graph/simulation-smoke.sh
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: graph-ui-sim
|
||||||
|
path: |
|
||||||
|
out/graph-ui-perf/**
|
||||||
|
out/graph-sim/**
|
||||||
68
.gitea/workflows/icscisa-kisa-refresh.yml
Normal file
68
.gitea/workflows/icscisa-kisa-refresh.yml
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
name: ICS/KISA Feed Refresh
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 2 * * MON'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
live_fetch:
|
||||||
|
description: 'Attempt live RSS fetch (fallback to samples on failure)'
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
offline_snapshot:
|
||||||
|
description: 'Force offline samples only (no network)'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
refresh:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
env:
|
||||||
|
ICSCISA_FEED_URL: ${{ secrets.ICSCISA_FEED_URL }}
|
||||||
|
KISA_FEED_URL: ${{ secrets.KISA_FEED_URL }}
|
||||||
|
FEED_GATEWAY_HOST: concelier-webservice
|
||||||
|
FEED_GATEWAY_SCHEME: http
|
||||||
|
LIVE_FETCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.live_fetch || 'true' }}
|
||||||
|
OFFLINE_SNAPSHOT: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.offline_snapshot || 'false' }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set run metadata
|
||||||
|
id: meta
|
||||||
|
run: |
|
||||||
|
RUN_DATE=$(date -u +%Y%m%d)
|
||||||
|
RUN_ID="icscisa-kisa-$(date -u +%Y%m%dT%H%M%SZ)"
|
||||||
|
echo "run_date=$RUN_DATE" >> $GITHUB_OUTPUT
|
||||||
|
echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
|
||||||
|
echo "RUN_DATE=$RUN_DATE" >> $GITHUB_ENV
|
||||||
|
echo "RUN_ID=$RUN_ID" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
|
||||||
|
- name: Run ICS/KISA refresh
|
||||||
|
run: |
|
||||||
|
python scripts/feeds/run_icscisa_kisa_refresh.py \
|
||||||
|
--out-dir out/feeds/icscisa-kisa \
|
||||||
|
--run-date "${{ steps.meta.outputs.run_date }}" \
|
||||||
|
--run-id "${{ steps.meta.outputs.run_id }}"
|
||||||
|
|
||||||
|
- name: Show fetch log
|
||||||
|
run: cat out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}/fetch.log
|
||||||
|
|
||||||
|
- name: Upload refresh artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: icscisa-kisa-${{ steps.meta.outputs.run_date }}
|
||||||
|
path: out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}
|
||||||
|
if-no-files-found: error
|
||||||
|
retention-days: 21
|
||||||
375
.gitea/workflows/integration-tests-gate.yml
Normal file
375
.gitea/workflows/integration-tests-gate.yml
Normal file
@@ -0,0 +1,375 @@
|
|||||||
|
# Sprint 3500.0004.0003 - T6: Integration Tests CI Gate
|
||||||
|
# Runs integration tests on PR and gates merges on failures
|
||||||
|
|
||||||
|
name: integration-tests-gate
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: [main, develop]
|
||||||
|
paths:
|
||||||
|
- 'src/**'
|
||||||
|
- 'src/__Tests/Integration/**'
|
||||||
|
- 'src/__Tests/__Benchmarks/golden-corpus/**'
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
run_performance:
|
||||||
|
description: 'Run performance baseline tests'
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
run_airgap:
|
||||||
|
description: 'Run air-gap tests'
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: integration-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# ==========================================================================
|
||||||
|
# T6-AC1: Integration tests run on PR
|
||||||
|
# ==========================================================================
|
||||||
|
integration-tests:
|
||||||
|
name: Integration Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 30
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: stellaops
|
||||||
|
POSTGRES_PASSWORD: test-only
|
||||||
|
POSTGRES_DB: stellaops_test
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/__Tests/Integration/**/*.csproj
|
||||||
|
|
||||||
|
- name: Build integration tests
|
||||||
|
run: dotnet build src/__Tests/Integration/**/*.csproj --configuration Release --no-restore
|
||||||
|
|
||||||
|
- name: Run Proof Chain Tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.ProofChain \
|
||||||
|
--configuration Release \
|
||||||
|
--no-build \
|
||||||
|
--logger "trx;LogFileName=proofchain.trx" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
env:
|
||||||
|
ConnectionStrings__StellaOps: "Host=localhost;Database=stellaops_test;Username=stellaops;Password=test-only"
|
||||||
|
|
||||||
|
- name: Run Reachability Tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Reachability \
|
||||||
|
--configuration Release \
|
||||||
|
--no-build \
|
||||||
|
--logger "trx;LogFileName=reachability.trx" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
- name: Run Unknowns Workflow Tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Unknowns \
|
||||||
|
--configuration Release \
|
||||||
|
--no-build \
|
||||||
|
--logger "trx;LogFileName=unknowns.trx" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
- name: Run Determinism Tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
|
||||||
|
--configuration Release \
|
||||||
|
--no-build \
|
||||||
|
--logger "trx;LogFileName=determinism.trx" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: integration-test-results
|
||||||
|
path: TestResults/**/*.trx
|
||||||
|
|
||||||
|
- name: Publish test summary
|
||||||
|
uses: dorny/test-reporter@v1
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: Integration Test Results
|
||||||
|
path: TestResults/**/*.trx
|
||||||
|
reporter: dotnet-trx
|
||||||
|
|
||||||
|
# ==========================================================================
|
||||||
|
# T6-AC2: Corpus validation on release branch
|
||||||
|
# ==========================================================================
|
||||||
|
corpus-validation:
|
||||||
|
name: Golden Corpus Validation
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.ref == 'refs/heads/main' || github.event_name == 'workflow_dispatch'
|
||||||
|
timeout-minutes: 15
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Validate corpus manifest
|
||||||
|
run: |
|
||||||
|
python3 -c "
|
||||||
|
import json
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
|
||||||
|
manifest_path = 'src/__Tests/__Benchmarks/golden-corpus/corpus-manifest.json'
|
||||||
|
with open(manifest_path) as f:
|
||||||
|
manifest = json.load(f)
|
||||||
|
|
||||||
|
print(f'Corpus version: {manifest.get(\"corpus_version\", \"unknown\")}')
|
||||||
|
print(f'Total cases: {manifest.get(\"total_cases\", 0)}')
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for case in manifest.get('cases', []):
|
||||||
|
case_path = os.path.join('src/__Tests/__Benchmarks/golden-corpus', case['path'])
|
||||||
|
if not os.path.isdir(case_path):
|
||||||
|
errors.append(f'Missing case directory: {case_path}')
|
||||||
|
else:
|
||||||
|
required_files = ['case.json', 'expected-score.json']
|
||||||
|
for f in required_files:
|
||||||
|
if not os.path.exists(os.path.join(case_path, f)):
|
||||||
|
errors.append(f'Missing file: {case_path}/{f}')
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
print('\\nValidation errors:')
|
||||||
|
for e in errors:
|
||||||
|
print(f' - {e}')
|
||||||
|
exit(1)
|
||||||
|
else:
|
||||||
|
print('\\nCorpus validation passed!')
|
||||||
|
"
|
||||||
|
|
||||||
|
- name: Run corpus scoring tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
|
||||||
|
--filter "Category=GoldenCorpus" \
|
||||||
|
--configuration Release \
|
||||||
|
--logger "trx;LogFileName=corpus.trx" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
# ==========================================================================
|
||||||
|
# T6-AC3: Determinism tests on nightly
|
||||||
|
# ==========================================================================
|
||||||
|
nightly-determinism:
|
||||||
|
name: Nightly Determinism Check
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_performance == 'true')
|
||||||
|
timeout-minutes: 45
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Run full determinism suite
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
|
||||||
|
--configuration Release \
|
||||||
|
--logger "trx;LogFileName=determinism-full.trx" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
- name: Run cross-run determinism check
|
||||||
|
run: |
|
||||||
|
# Run scoring 3 times and compare hashes
|
||||||
|
for i in 1 2 3; do
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
|
||||||
|
--filter "FullyQualifiedName~IdenticalInput_ProducesIdenticalHash" \
|
||||||
|
--results-directory ./TestResults/run-$i
|
||||||
|
done
|
||||||
|
|
||||||
|
# Compare all results
|
||||||
|
echo "Comparing determinism across runs..."
|
||||||
|
|
||||||
|
- name: Upload determinism results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: nightly-determinism-results
|
||||||
|
path: TestResults/**
|
||||||
|
|
||||||
|
# ==========================================================================
|
||||||
|
# T6-AC4: Test coverage reported to dashboard
|
||||||
|
# ==========================================================================
|
||||||
|
coverage-report:
|
||||||
|
name: Coverage Report
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [integration-tests]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Run tests with coverage
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/**/*.csproj \
|
||||||
|
--configuration Release \
|
||||||
|
--collect:"XPlat Code Coverage" \
|
||||||
|
--results-directory ./TestResults/Coverage
|
||||||
|
|
||||||
|
- name: Generate coverage report
|
||||||
|
uses: danielpalme/ReportGenerator-GitHub-Action@5.2.0
|
||||||
|
with:
|
||||||
|
reports: TestResults/Coverage/**/coverage.cobertura.xml
|
||||||
|
targetdir: TestResults/CoverageReport
|
||||||
|
reporttypes: 'Html;Cobertura;MarkdownSummary'
|
||||||
|
|
||||||
|
- name: Upload coverage report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: coverage-report
|
||||||
|
path: TestResults/CoverageReport/**
|
||||||
|
|
||||||
|
- name: Add coverage to PR comment
|
||||||
|
uses: marocchino/sticky-pull-request-comment@v2
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
with:
|
||||||
|
recreate: true
|
||||||
|
path: TestResults/CoverageReport/Summary.md
|
||||||
|
|
||||||
|
# ==========================================================================
|
||||||
|
# T6-AC5: Flaky test quarantine process
|
||||||
|
# ==========================================================================
|
||||||
|
flaky-test-check:
|
||||||
|
name: Flaky Test Detection
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [integration-tests]
|
||||||
|
if: failure()
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check for known flaky tests
|
||||||
|
run: |
|
||||||
|
# Check if failure is from a known flaky test
|
||||||
|
QUARANTINE_FILE=".github/flaky-tests-quarantine.json"
|
||||||
|
if [ -f "$QUARANTINE_FILE" ]; then
|
||||||
|
echo "Checking against quarantine list..."
|
||||||
|
# Implementation would compare failed tests against quarantine
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Create flaky test issue
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
// After 2 consecutive failures, create issue for quarantine review
|
||||||
|
console.log('Checking for flaky test patterns...');
|
||||||
|
// Implementation would analyze test history
|
||||||
|
|
||||||
|
# ==========================================================================
|
||||||
|
# Performance Tests (optional, on demand)
|
||||||
|
# ==========================================================================
|
||||||
|
performance-tests:
|
||||||
|
name: Performance Baseline Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_performance == 'true'
|
||||||
|
timeout-minutes: 30
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Run performance tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Performance \
|
||||||
|
--configuration Release \
|
||||||
|
--logger "trx;LogFileName=performance.trx" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
- name: Upload performance report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: performance-report
|
||||||
|
path: |
|
||||||
|
TestResults/**
|
||||||
|
src/__Tests/Integration/StellaOps.Integration.Performance/output/**
|
||||||
|
|
||||||
|
- name: Check for regressions
|
||||||
|
run: |
|
||||||
|
# Check if any test exceeded 20% threshold
|
||||||
|
if [ -f "src/__Tests/Integration/StellaOps.Integration.Performance/output/performance-report.json" ]; then
|
||||||
|
python3 -c "
|
||||||
|
import json
|
||||||
|
with open('src/__Tests/Integration/StellaOps.Integration.Performance/output/performance-report.json') as f:
|
||||||
|
report = json.load(f)
|
||||||
|
regressions = [m for m in report.get('Metrics', []) if m.get('DeltaPercent', 0) > 20]
|
||||||
|
if regressions:
|
||||||
|
print('Performance regressions detected!')
|
||||||
|
for r in regressions:
|
||||||
|
print(f' {r[\"Name\"]}: +{r[\"DeltaPercent\"]:.1f}%')
|
||||||
|
exit(1)
|
||||||
|
print('No performance regressions detected.')
|
||||||
|
"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ==========================================================================
|
||||||
|
# Air-Gap Tests (optional, on demand)
|
||||||
|
# ==========================================================================
|
||||||
|
airgap-tests:
|
||||||
|
name: Air-Gap Integration Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_airgap == 'true'
|
||||||
|
timeout-minutes: 30
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
|
- name: Run air-gap tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.AirGap \
|
||||||
|
--configuration Release \
|
||||||
|
--logger "trx;LogFileName=airgap.trx" \
|
||||||
|
--results-directory ./TestResults
|
||||||
|
|
||||||
|
- name: Upload air-gap test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: airgap-test-results
|
||||||
|
path: TestResults/**
|
||||||
128
.gitea/workflows/interop-e2e.yml
Normal file
128
.gitea/workflows/interop-e2e.yml
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
name: Interop E2E Tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/Scanner/**'
|
||||||
|
- 'src/Excititor/**'
|
||||||
|
- 'src/__Tests/interop/**'
|
||||||
|
schedule:
|
||||||
|
- cron: '0 6 * * *' # Nightly at 6 AM UTC
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
interop-tests:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
format: [cyclonedx, spdx]
|
||||||
|
arch: [amd64]
|
||||||
|
include:
|
||||||
|
- format: cyclonedx
|
||||||
|
format_flag: cyclonedx-json
|
||||||
|
- format: spdx
|
||||||
|
format_flag: spdx-json
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install Syft
|
||||||
|
run: |
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin
|
||||||
|
syft --version
|
||||||
|
|
||||||
|
- name: Install Grype
|
||||||
|
run: |
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin
|
||||||
|
grype --version
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
run: |
|
||||||
|
curl -sSfL https://github.com/sigstore/cosign/releases/latest/download/cosign-linux-amd64 -o /usr/local/bin/cosign
|
||||||
|
chmod +x /usr/local/bin/cosign
|
||||||
|
cosign version
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/StellaOps.sln
|
||||||
|
|
||||||
|
- name: Build Stella CLI
|
||||||
|
run: dotnet build src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release
|
||||||
|
|
||||||
|
- name: Build interop tests
|
||||||
|
run: dotnet build src/__Tests/interop/StellaOps.Interop.Tests/StellaOps.Interop.Tests.csproj
|
||||||
|
|
||||||
|
- name: Run interop tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/interop/StellaOps.Interop.Tests \
|
||||||
|
--filter "Format=${{ matrix.format }}" \
|
||||||
|
--logger "trx;LogFileName=interop-${{ matrix.format }}.trx" \
|
||||||
|
--logger "console;verbosity=detailed" \
|
||||||
|
--results-directory ./results \
|
||||||
|
-- RunConfiguration.TestSessionTimeout=900000
|
||||||
|
|
||||||
|
- name: Generate parity report
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
# TODO: Generate parity report from test results
|
||||||
|
echo '{"format": "${{ matrix.format }}", "parityPercent": 0}' > ./results/parity-report-${{ matrix.format }}.json
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: interop-test-results-${{ matrix.format }}
|
||||||
|
path: ./results/
|
||||||
|
|
||||||
|
- name: Check parity threshold
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
PARITY=$(jq '.parityPercent' ./results/parity-report-${{ matrix.format }}.json 2>/dev/null || echo "0")
|
||||||
|
echo "Parity for ${{ matrix.format }}: ${PARITY}%"
|
||||||
|
|
||||||
|
if (( $(echo "$PARITY < 95" | bc -l 2>/dev/null || echo "1") )); then
|
||||||
|
echo "::warning::Findings parity ${PARITY}% is below 95% threshold for ${{ matrix.format }}"
|
||||||
|
# Don't fail the build yet - this is initial implementation
|
||||||
|
# exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
summary:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: interop-tests
|
||||||
|
if: always()
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Download all artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
path: ./all-results
|
||||||
|
|
||||||
|
- name: Generate summary
|
||||||
|
run: |
|
||||||
|
echo "## Interop Test Summary" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Format | Status |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
for format in cyclonedx spdx; do
|
||||||
|
if [ -f "./all-results/interop-test-results-${format}/parity-report-${format}.json" ]; then
|
||||||
|
PARITY=$(jq -r '.parityPercent // 0' "./all-results/interop-test-results-${format}/parity-report-${format}.json")
|
||||||
|
if (( $(echo "$PARITY >= 95" | bc -l 2>/dev/null || echo "0") )); then
|
||||||
|
STATUS="✅ Pass (${PARITY}%)"
|
||||||
|
else
|
||||||
|
STATUS="⚠️ Below threshold (${PARITY}%)"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
STATUS="❌ No results"
|
||||||
|
fi
|
||||||
|
echo "| ${format} | ${STATUS} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
done
|
||||||
81
.gitea/workflows/ledger-oas-ci.yml
Normal file
81
.gitea/workflows/ledger-oas-ci.yml
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
name: Ledger OpenAPI CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'api/ledger/**'
|
||||||
|
- 'devops/ledger/**'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'api/ledger/**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
validate-oas:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
|
||||||
|
- name: Install tools
|
||||||
|
run: |
|
||||||
|
npm install -g @stoplight/spectral-cli
|
||||||
|
npm install -g @openapitools/openapi-generator-cli
|
||||||
|
|
||||||
|
- name: Validate OpenAPI spec
|
||||||
|
run: |
|
||||||
|
chmod +x devops/ledger/validate-oas.sh
|
||||||
|
devops/ledger/validate-oas.sh
|
||||||
|
|
||||||
|
- name: Upload validation report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ledger-oas-validation-${{ github.run_number }}
|
||||||
|
path: |
|
||||||
|
out/ledger/oas/lint-report.json
|
||||||
|
out/ledger/oas/validation-report.txt
|
||||||
|
out/ledger/oas/spec-summary.json
|
||||||
|
if-no-files-found: warn
|
||||||
|
|
||||||
|
check-wellknown:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: validate-oas
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check .well-known/openapi structure
|
||||||
|
run: |
|
||||||
|
# Validate .well-known structure if exists
|
||||||
|
if [ -d ".well-known" ]; then
|
||||||
|
echo "Checking .well-known/openapi..."
|
||||||
|
if [ -f ".well-known/openapi.json" ]; then
|
||||||
|
python3 -c "import json; json.load(open('.well-known/openapi.json'))"
|
||||||
|
echo ".well-known/openapi.json is valid JSON"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "[info] .well-known directory not present (OK for dev)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
deprecation-check:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: validate-oas
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check deprecation policy
|
||||||
|
run: |
|
||||||
|
if [ -f "devops/ledger/deprecation-policy.yaml" ]; then
|
||||||
|
echo "Validating deprecation policy..."
|
||||||
|
python3 -c "import yaml; yaml.safe_load(open('devops/ledger/deprecation-policy.yaml'))"
|
||||||
|
echo "Deprecation policy is valid"
|
||||||
|
else
|
||||||
|
echo "[info] No deprecation policy yet (OK for initial setup)"
|
||||||
|
fi
|
||||||
101
.gitea/workflows/ledger-packs-ci.yml
Normal file
101
.gitea/workflows/ledger-packs-ci.yml
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
name: Ledger Packs CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
snapshot_id:
|
||||||
|
description: 'Snapshot ID (leave empty for auto)'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
sign:
|
||||||
|
description: 'Sign pack (1=yes)'
|
||||||
|
required: false
|
||||||
|
default: '0'
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'devops/ledger/**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-pack:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
|
||||||
|
- name: Configure signing
|
||||||
|
run: |
|
||||||
|
if [ -z "${COSIGN_PRIVATE_KEY_B64}" ] || [ "${{ github.event.inputs.sign }}" = "1" ]; then
|
||||||
|
echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
|
||||||
|
echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Build pack
|
||||||
|
run: |
|
||||||
|
chmod +x devops/ledger/build-pack.sh
|
||||||
|
SNAPSHOT_ID="${{ github.event.inputs.snapshot_id }}"
|
||||||
|
if [ -z "$SNAPSHOT_ID" ]; then
|
||||||
|
SNAPSHOT_ID="ci-$(date +%Y%m%d%H%M%S)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
SIGN_FLAG=""
|
||||||
|
if [ "${{ github.event.inputs.sign }}" = "1" ] || [ -n "${COSIGN_PRIVATE_KEY_B64}" ]; then
|
||||||
|
SIGN_FLAG="--sign"
|
||||||
|
fi
|
||||||
|
|
||||||
|
SNAPSHOT_ID="$SNAPSHOT_ID" devops/ledger/build-pack.sh $SIGN_FLAG
|
||||||
|
|
||||||
|
- name: Verify checksums
|
||||||
|
run: |
|
||||||
|
cd out/ledger/packs
|
||||||
|
for f in *.SHA256SUMS; do
|
||||||
|
if [ -f "$f" ]; then
|
||||||
|
sha256sum -c "$f"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Upload pack
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ledger-pack-${{ github.run_number }}
|
||||||
|
path: |
|
||||||
|
out/ledger/packs/*.pack.tar.gz
|
||||||
|
out/ledger/packs/*.SHA256SUMS
|
||||||
|
out/ledger/packs/*.dsse.json
|
||||||
|
if-no-files-found: warn
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
verify-pack:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: build-pack
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download pack
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ledger-pack-${{ github.run_number }}
|
||||||
|
path: out/ledger/packs/
|
||||||
|
|
||||||
|
- name: Verify pack structure
|
||||||
|
run: |
|
||||||
|
cd out/ledger/packs
|
||||||
|
for pack in *.pack.tar.gz; do
|
||||||
|
if [ -f "$pack" ]; then
|
||||||
|
echo "Verifying $pack..."
|
||||||
|
tar -tzf "$pack" | head -20
|
||||||
|
|
||||||
|
# Extract and check manifest
|
||||||
|
tar -xzf "$pack" -C /tmp manifest.json 2>/dev/null || true
|
||||||
|
if [ -f /tmp/manifest.json ]; then
|
||||||
|
python3 -c "import json; json.load(open('/tmp/manifest.json'))"
|
||||||
|
echo "Pack manifest is valid JSON"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
299
.gitea/workflows/license-audit.yml
Normal file
299
.gitea/workflows/license-audit.yml
Normal file
@@ -0,0 +1,299 @@
|
|||||||
|
name: License Audit
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- '**/*.csproj'
|
||||||
|
- '**/package.json'
|
||||||
|
- '**/package-lock.json'
|
||||||
|
- 'Directory.Build.props'
|
||||||
|
- 'Directory.Packages.props'
|
||||||
|
- 'NOTICE.md'
|
||||||
|
- 'third-party-licenses/**'
|
||||||
|
- 'docs/legal/**'
|
||||||
|
- '.gitea/workflows/license-audit.yml'
|
||||||
|
- '.gitea/scripts/validate/validate-licenses.sh'
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- '**/*.csproj'
|
||||||
|
- '**/package.json'
|
||||||
|
- '**/package-lock.json'
|
||||||
|
- 'Directory.Build.props'
|
||||||
|
- 'Directory.Packages.props'
|
||||||
|
schedule:
|
||||||
|
# Weekly audit every Sunday at 00:00 UTC
|
||||||
|
- cron: '0 0 * * 0'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
full_scan:
|
||||||
|
description: 'Run full transitive dependency scan'
|
||||||
|
required: false
|
||||||
|
default: 'false'
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
nuget-license-audit:
|
||||||
|
name: NuGet License Audit
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
TZ: UTC
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 1
|
||||||
|
|
||||||
|
- name: Setup .NET 10
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Cache NuGet packages
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.nuget/packages
|
||||||
|
.nuget/packages
|
||||||
|
key: license-audit-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}
|
||||||
|
|
||||||
|
- name: Install dotnet-delice
|
||||||
|
run: dotnet tool install --global dotnet-delice || true
|
||||||
|
|
||||||
|
- name: Extract NuGet licenses
|
||||||
|
run: |
|
||||||
|
mkdir -p out/license-audit
|
||||||
|
|
||||||
|
# List packages from key projects
|
||||||
|
for proj in \
|
||||||
|
src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj \
|
||||||
|
src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
|
||||||
|
src/Authority/StellaOps.Authority/StellaOps.Authority.WebService/StellaOps.Authority.WebService.csproj \
|
||||||
|
src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj
|
||||||
|
do
|
||||||
|
if [ -f "$proj" ]; then
|
||||||
|
name=$(basename $(dirname "$proj"))
|
||||||
|
echo "Scanning: $proj"
|
||||||
|
dotnet list "$proj" package --include-transitive 2>/dev/null | tee -a out/license-audit/nuget-packages.txt || true
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Validate against allowlist
|
||||||
|
run: |
|
||||||
|
bash .gitea/scripts/validate/validate-licenses.sh nuget out/license-audit/nuget-packages.txt
|
||||||
|
|
||||||
|
- name: Upload NuGet license report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: nuget-license-report
|
||||||
|
path: out/license-audit
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
npm-license-audit:
|
||||||
|
name: npm License Audit
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 1
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
cache: 'npm'
|
||||||
|
cache-dependency-path: src/Web/StellaOps.Web/package-lock.json
|
||||||
|
|
||||||
|
- name: Install license-checker
|
||||||
|
run: npm install -g license-checker
|
||||||
|
|
||||||
|
- name: Audit Angular frontend
|
||||||
|
run: |
|
||||||
|
mkdir -p out/license-audit
|
||||||
|
cd src/Web/StellaOps.Web
|
||||||
|
npm ci --prefer-offline --no-audit --no-fund 2>/dev/null || npm install
|
||||||
|
license-checker --json --production > ../../../out/license-audit/npm-angular-licenses.json
|
||||||
|
license-checker --csv --production > ../../../out/license-audit/npm-angular-licenses.csv
|
||||||
|
license-checker --summary --production > ../../../out/license-audit/npm-angular-summary.txt
|
||||||
|
|
||||||
|
- name: Audit DevPortal
|
||||||
|
run: |
|
||||||
|
cd src/DevPortal/StellaOps.DevPortal.Site
|
||||||
|
if [ -f package-lock.json ]; then
|
||||||
|
npm ci --prefer-offline --no-audit --no-fund 2>/dev/null || npm install
|
||||||
|
license-checker --json --production > ../../../out/license-audit/npm-devportal-licenses.json || true
|
||||||
|
fi
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Validate against allowlist
|
||||||
|
run: |
|
||||||
|
bash .gitea/scripts/validate/validate-licenses.sh npm out/license-audit/npm-angular-licenses.json
|
||||||
|
|
||||||
|
- name: Upload npm license report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: npm-license-report
|
||||||
|
path: out/license-audit
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
vendored-license-check:
|
||||||
|
name: Vendored Components Check
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 1
|
||||||
|
|
||||||
|
- name: Verify vendored license files exist
|
||||||
|
run: |
|
||||||
|
echo "Checking vendored license files..."
|
||||||
|
|
||||||
|
# Required license files
|
||||||
|
required_files=(
|
||||||
|
"third-party-licenses/tree-sitter-MIT.txt"
|
||||||
|
"third-party-licenses/tree-sitter-ruby-MIT.txt"
|
||||||
|
"third-party-licenses/AlexMAS.GostCryptography-MIT.txt"
|
||||||
|
)
|
||||||
|
|
||||||
|
missing=0
|
||||||
|
for file in "${required_files[@]}"; do
|
||||||
|
if [ ! -f "$file" ]; then
|
||||||
|
echo "ERROR: Missing required license file: $file"
|
||||||
|
missing=$((missing + 1))
|
||||||
|
else
|
||||||
|
echo "OK: $file"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ $missing -gt 0 ]; then
|
||||||
|
echo "ERROR: $missing required license file(s) missing"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "All vendored license files present."
|
||||||
|
|
||||||
|
- name: Verify NOTICE.md is up to date
|
||||||
|
run: |
|
||||||
|
echo "Checking NOTICE.md references..."
|
||||||
|
|
||||||
|
# Check that vendored components are mentioned in NOTICE.md
|
||||||
|
for component in "tree-sitter" "AlexMAS.GostCryptography" "CryptoPro"; do
|
||||||
|
if ! grep -q "$component" NOTICE.md; then
|
||||||
|
echo "WARNING: $component not mentioned in NOTICE.md"
|
||||||
|
else
|
||||||
|
echo "OK: $component referenced in NOTICE.md"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Verify vendored source has LICENSE
|
||||||
|
run: |
|
||||||
|
echo "Checking vendored source directories..."
|
||||||
|
|
||||||
|
# GostCryptography fork must have LICENSE file
|
||||||
|
gost_dir="src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro/third_party/AlexMAS.GostCryptography"
|
||||||
|
if [ -d "$gost_dir" ]; then
|
||||||
|
if [ ! -f "$gost_dir/LICENSE" ]; then
|
||||||
|
echo "ERROR: $gost_dir is missing LICENSE file"
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
echo "OK: $gost_dir/LICENSE exists"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
license-compatibility-check:
|
||||||
|
name: License Compatibility Check
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [nuget-license-audit, npm-license-audit]
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download NuGet report
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: nuget-license-report
|
||||||
|
path: out/nuget
|
||||||
|
|
||||||
|
- name: Download npm report
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: npm-license-report
|
||||||
|
path: out/npm
|
||||||
|
|
||||||
|
- name: Check for incompatible licenses
|
||||||
|
run: |
|
||||||
|
echo "Checking for AGPL-3.0-or-later incompatible licenses..."
|
||||||
|
|
||||||
|
# Known incompatible licenses (SPDX identifiers)
|
||||||
|
incompatible=(
|
||||||
|
"GPL-2.0-only"
|
||||||
|
"SSPL-1.0"
|
||||||
|
"BUSL-1.1"
|
||||||
|
"Commons-Clause"
|
||||||
|
"Proprietary"
|
||||||
|
)
|
||||||
|
|
||||||
|
found_issues=0
|
||||||
|
|
||||||
|
# Check npm report
|
||||||
|
if [ -f out/npm/npm-angular-licenses.json ]; then
|
||||||
|
for license in "${incompatible[@]}"; do
|
||||||
|
if grep -qi "\"$license\"" out/npm/npm-angular-licenses.json; then
|
||||||
|
echo "ERROR: Incompatible license found in npm dependencies: $license"
|
||||||
|
found_issues=$((found_issues + 1))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ $found_issues -gt 0 ]; then
|
||||||
|
echo "ERROR: Found $found_issues incompatible license(s)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "All licenses compatible with AGPL-3.0-or-later"
|
||||||
|
|
||||||
|
- name: Generate combined report
|
||||||
|
run: |
|
||||||
|
mkdir -p out/combined
|
||||||
|
cat > out/combined/license-audit-summary.md << 'EOF'
|
||||||
|
# License Audit Summary
|
||||||
|
|
||||||
|
Generated: $(date -u +%Y-%m-%dT%H:%M:%SZ)
|
||||||
|
Commit: ${{ github.sha }}
|
||||||
|
|
||||||
|
## Status: PASSED
|
||||||
|
|
||||||
|
All dependencies use licenses compatible with AGPL-3.0-or-later.
|
||||||
|
|
||||||
|
## Allowed Licenses
|
||||||
|
- MIT
|
||||||
|
- Apache-2.0
|
||||||
|
- BSD-2-Clause
|
||||||
|
- BSD-3-Clause
|
||||||
|
- ISC
|
||||||
|
- 0BSD
|
||||||
|
- PostgreSQL
|
||||||
|
- MPL-2.0
|
||||||
|
- CC0-1.0
|
||||||
|
- Unlicense
|
||||||
|
|
||||||
|
## Reports
|
||||||
|
- NuGet: See nuget-license-report artifact
|
||||||
|
- npm: See npm-license-report artifact
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
- Full dependency list: docs/legal/THIRD-PARTY-DEPENDENCIES.md
|
||||||
|
- Compatibility analysis: docs/legal/LICENSE-COMPATIBILITY.md
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Upload combined report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: license-audit-summary
|
||||||
|
path: out/combined
|
||||||
|
retention-days: 90
|
||||||
188
.gitea/workflows/lighthouse-ci.yml
Normal file
188
.gitea/workflows/lighthouse-ci.yml
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
# .gitea/workflows/lighthouse-ci.yml
|
||||||
|
# Lighthouse CI for performance and accessibility testing of the StellaOps Web UI
|
||||||
|
|
||||||
|
name: Lighthouse CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/Web/StellaOps.Web/**'
|
||||||
|
- '.gitea/workflows/lighthouse-ci.yml'
|
||||||
|
pull_request:
|
||||||
|
branches: [main, develop]
|
||||||
|
paths:
|
||||||
|
- 'src/Web/StellaOps.Web/**'
|
||||||
|
schedule:
|
||||||
|
# Run weekly on Sunday at 2 AM UTC
|
||||||
|
- cron: '0 2 * * 0'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
NODE_VERSION: '20'
|
||||||
|
LHCI_BUILD_CONTEXT__CURRENT_BRANCH: ${{ github.head_ref || github.ref_name }}
|
||||||
|
LHCI_BUILD_CONTEXT__COMMIT_SHA: ${{ github.sha }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lighthouse:
|
||||||
|
name: Lighthouse Audit
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
working-directory: src/Web/StellaOps.Web
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: ${{ env.NODE_VERSION }}
|
||||||
|
cache: 'npm'
|
||||||
|
cache-dependency-path: src/Web/StellaOps.Web/package-lock.json
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Build production bundle
|
||||||
|
run: npm run build -- --configuration production
|
||||||
|
|
||||||
|
- name: Install Lighthouse CI
|
||||||
|
run: npm install -g @lhci/cli@0.13.x
|
||||||
|
|
||||||
|
- name: Run Lighthouse CI
|
||||||
|
run: |
|
||||||
|
lhci autorun \
|
||||||
|
--collect.staticDistDir=./dist/stella-ops-web/browser \
|
||||||
|
--collect.numberOfRuns=3 \
|
||||||
|
--assert.preset=lighthouse:recommended \
|
||||||
|
--assert.assertions.categories:performance=off \
|
||||||
|
--assert.assertions.categories:accessibility=off \
|
||||||
|
--upload.target=filesystem \
|
||||||
|
--upload.outputDir=./lighthouse-results
|
||||||
|
|
||||||
|
- name: Evaluate Lighthouse Results
|
||||||
|
id: lhci-results
|
||||||
|
run: |
|
||||||
|
# Parse the latest Lighthouse report
|
||||||
|
REPORT=$(ls -t lighthouse-results/*.json | head -1)
|
||||||
|
|
||||||
|
if [ -f "$REPORT" ]; then
|
||||||
|
PERF=$(jq '.categories.performance.score * 100' "$REPORT" | cut -d. -f1)
|
||||||
|
A11Y=$(jq '.categories.accessibility.score * 100' "$REPORT" | cut -d. -f1)
|
||||||
|
BP=$(jq '.categories["best-practices"].score * 100' "$REPORT" | cut -d. -f1)
|
||||||
|
SEO=$(jq '.categories.seo.score * 100' "$REPORT" | cut -d. -f1)
|
||||||
|
|
||||||
|
echo "performance=$PERF" >> $GITHUB_OUTPUT
|
||||||
|
echo "accessibility=$A11Y" >> $GITHUB_OUTPUT
|
||||||
|
echo "best-practices=$BP" >> $GITHUB_OUTPUT
|
||||||
|
echo "seo=$SEO" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
echo "## Lighthouse Results" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Category | Score | Threshold | Status |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|----------|-------|-----------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
# Performance: target >= 90
|
||||||
|
if [ "$PERF" -ge 90 ]; then
|
||||||
|
echo "| Performance | $PERF | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
else
|
||||||
|
echo "| Performance | $PERF | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Accessibility: target >= 95
|
||||||
|
if [ "$A11Y" -ge 95 ]; then
|
||||||
|
echo "| Accessibility | $A11Y | >= 95 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
else
|
||||||
|
echo "| Accessibility | $A11Y | >= 95 | :x: |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Best Practices: target >= 90
|
||||||
|
if [ "$BP" -ge 90 ]; then
|
||||||
|
echo "| Best Practices | $BP | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
else
|
||||||
|
echo "| Best Practices | $BP | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
|
||||||
|
# SEO: target >= 90
|
||||||
|
if [ "$SEO" -ge 90 ]; then
|
||||||
|
echo "| SEO | $SEO | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
else
|
||||||
|
echo "| SEO | $SEO | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Check Quality Gates
|
||||||
|
run: |
|
||||||
|
PERF=${{ steps.lhci-results.outputs.performance }}
|
||||||
|
A11Y=${{ steps.lhci-results.outputs.accessibility }}
|
||||||
|
|
||||||
|
FAILED=0
|
||||||
|
|
||||||
|
# Performance gate (warning only, not blocking)
|
||||||
|
if [ "$PERF" -lt 90 ]; then
|
||||||
|
echo "::warning::Performance score ($PERF) is below target (90)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Accessibility gate (blocking)
|
||||||
|
if [ "$A11Y" -lt 95 ]; then
|
||||||
|
echo "::error::Accessibility score ($A11Y) is below required threshold (95)"
|
||||||
|
FAILED=1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$FAILED" -eq 1 ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload Lighthouse Reports
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: lighthouse-reports
|
||||||
|
path: src/Web/StellaOps.Web/lighthouse-results/
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
axe-accessibility:
|
||||||
|
name: Axe Accessibility Audit
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
working-directory: src/Web/StellaOps.Web
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: ${{ env.NODE_VERSION }}
|
||||||
|
cache: 'npm'
|
||||||
|
cache-dependency-path: src/Web/StellaOps.Web/package-lock.json
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Install Playwright browsers
|
||||||
|
run: npx playwright install --with-deps chromium
|
||||||
|
|
||||||
|
- name: Build production bundle
|
||||||
|
run: npm run build -- --configuration production
|
||||||
|
|
||||||
|
- name: Start preview server
|
||||||
|
run: |
|
||||||
|
npx serve -s dist/stella-ops-web/browser -l 4200 &
|
||||||
|
sleep 5
|
||||||
|
|
||||||
|
- name: Run Axe accessibility tests
|
||||||
|
run: |
|
||||||
|
npm run test:a11y || true
|
||||||
|
|
||||||
|
- name: Upload Axe results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: axe-accessibility-results
|
||||||
|
path: src/Web/StellaOps.Web/test-results/
|
||||||
|
retention-days: 30
|
||||||
64
.gitea/workflows/lnm-backfill.yml
Normal file
64
.gitea/workflows/lnm-backfill.yml
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
name: LNM Backfill CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
mongo_uri:
|
||||||
|
description: 'Staging Mongo URI (read-only snapshot)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
since_commit:
|
||||||
|
description: 'Git commit to compare (default HEAD)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
dry_run:
|
||||||
|
description: 'Dry run (no writes)'
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lnm-backfill:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up .NET SDK
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore
|
||||||
|
run: dotnet restore src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj
|
||||||
|
|
||||||
|
- name: Run backfill (dry-run supported)
|
||||||
|
env:
|
||||||
|
STAGING_MONGO_URI: ${{ inputs.mongo_uri }}
|
||||||
|
run: |
|
||||||
|
mkdir -p $ARTIFACT_DIR
|
||||||
|
EXTRA=()
|
||||||
|
if [ "${{ inputs.dry_run }}" = "true" ]; then EXTRA+=("--dry-run"); fi
|
||||||
|
dotnet run --project src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj -- --mode=observations --batch-size=500 --max-conflicts=0 --mongo "$STAGING_MONGO_URI" "${EXTRA[@]}" | tee $ARTIFACT_DIR/backfill-observations.log
|
||||||
|
dotnet run --project src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj -- --mode=linksets --batch-size=500 --max-conflicts=0 --mongo "$STAGING_MONGO_URI" "${EXTRA[@]}" | tee $ARTIFACT_DIR/backfill-linksets.log
|
||||||
|
|
||||||
|
- name: Validate counts
|
||||||
|
env:
|
||||||
|
STAGING_MONGO_URI: ${{ inputs.mongo_uri }}
|
||||||
|
run: |
|
||||||
|
STAGING_MONGO_URI="$STAGING_MONGO_URI" devops/lnm/backfill-validation.sh
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: lnm-backfill-artifacts
|
||||||
|
path: ${{ env.ARTIFACT_DIR }}
|
||||||
83
.gitea/workflows/lnm-migration-ci.yml
Normal file
83
.gitea/workflows/lnm-migration-ci.yml
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
name: LNM Migration CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
run_staging:
|
||||||
|
description: 'Run staging backfill (1=yes)'
|
||||||
|
required: false
|
||||||
|
default: '0'
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/Concelier/__Libraries/StellaOps.Concelier.Migrations/**'
|
||||||
|
- 'devops/lnm/**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-runner:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Setup cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
|
||||||
|
- name: Configure signing
|
||||||
|
run: |
|
||||||
|
if [ -z "${{ secrets.COSIGN_PRIVATE_KEY_B64 }}" ]; then
|
||||||
|
echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
|
||||||
|
echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
|
||||||
|
- name: Build and package runner
|
||||||
|
run: |
|
||||||
|
chmod +x devops/lnm/package-runner.sh
|
||||||
|
devops/lnm/package-runner.sh
|
||||||
|
|
||||||
|
- name: Verify checksums
|
||||||
|
run: |
|
||||||
|
cd out/lnm
|
||||||
|
sha256sum -c SHA256SUMS
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: lnm-migration-runner-${{ github.run_number }}
|
||||||
|
path: |
|
||||||
|
out/lnm/lnm-migration-runner.tar.gz
|
||||||
|
out/lnm/lnm-migration-runner.manifest.json
|
||||||
|
out/lnm/lnm-migration-runner.dsse.json
|
||||||
|
out/lnm/SHA256SUMS
|
||||||
|
if-no-files-found: warn
|
||||||
|
|
||||||
|
validate-metrics:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: build-runner
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Validate monitoring config
|
||||||
|
run: |
|
||||||
|
# Validate alert rules syntax
|
||||||
|
if [ -f "devops/lnm/alerts/lnm-alerts.yaml" ]; then
|
||||||
|
echo "Validating alert rules..."
|
||||||
|
python3 -c "import yaml; yaml.safe_load(open('devops/lnm/alerts/lnm-alerts.yaml'))"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate dashboard JSON
|
||||||
|
if [ -f "devops/lnm/dashboards/lnm-migration.json" ]; then
|
||||||
|
echo "Validating dashboard..."
|
||||||
|
python3 -c "import json; json.load(open('devops/lnm/dashboards/lnm-migration.json'))"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Monitoring config validation complete"
|
||||||
63
.gitea/workflows/lnm-vex-backfill.yml
Normal file
63
.gitea/workflows/lnm-vex-backfill.yml
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
name: LNM VEX Backfill
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
mongo_uri:
|
||||||
|
description: 'Staging Mongo URI'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
nats_url:
|
||||||
|
description: 'NATS URL'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
redis_url:
|
||||||
|
description: 'Redis URL'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
dry_run:
|
||||||
|
description: 'Dry run (no writes)'
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
vex-backfill:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up .NET SDK
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore
|
||||||
|
run: dotnet restore src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj
|
||||||
|
|
||||||
|
- name: Run VEX backfill
|
||||||
|
env:
|
||||||
|
STAGING_MONGO_URI: ${{ inputs.mongo_uri }}
|
||||||
|
NATS_URL: ${{ inputs.nats_url }}
|
||||||
|
REDIS_URL: ${{ inputs.redis_url }}
|
||||||
|
run: |
|
||||||
|
mkdir -p $ARTIFACT_DIR
|
||||||
|
EXTRA=()
|
||||||
|
if [ "${{ inputs.dry_run }}" = "true" ]; then EXTRA+=("--dry-run"); fi
|
||||||
|
dotnet run --project src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj -- --mode=vex --batch-size=500 --max-conflicts=0 --mongo "$STAGING_MONGO_URI" --nats "$NATS_URL" --redis "$REDIS_URL" "${EXTRA[@]}" | tee $ARTIFACT_DIR/vex-backfill.log
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: lnm-vex-backfill-artifacts
|
||||||
|
path: ${{ env.ARTIFACT_DIR }}
|
||||||
125
.gitea/workflows/manifest-integrity.yml
Normal file
125
.gitea/workflows/manifest-integrity.yml
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
name: Manifest Integrity
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'docs/**/*.schema.json'
|
||||||
|
- 'docs/contracts/**'
|
||||||
|
- 'docs/schemas/**'
|
||||||
|
- 'scripts/packs/**'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'docs/**/*.schema.json'
|
||||||
|
- 'docs/contracts/**'
|
||||||
|
- 'docs/schemas/**'
|
||||||
|
- 'scripts/packs/**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
validate-schemas:
|
||||||
|
name: Validate Schema Integrity
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: npm install -g ajv-cli ajv-formats
|
||||||
|
|
||||||
|
- name: Validate JSON schemas
|
||||||
|
run: |
|
||||||
|
EXIT_CODE=0
|
||||||
|
for schema in docs/schemas/*.schema.json; do
|
||||||
|
echo "Validating $schema..."
|
||||||
|
if ! ajv compile -s "$schema" --spec=draft2020 2>/dev/null; then
|
||||||
|
echo "Error: $schema is invalid"
|
||||||
|
EXIT_CODE=1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
exit $EXIT_CODE
|
||||||
|
|
||||||
|
validate-contracts:
|
||||||
|
name: Validate Contract Documents
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check contract structure
|
||||||
|
run: |
|
||||||
|
for contract in docs/contracts/*.md; do
|
||||||
|
echo "Checking $contract..."
|
||||||
|
# Verify required sections exist
|
||||||
|
if ! grep -q "^## " "$contract"; then
|
||||||
|
echo "Warning: $contract missing section headers"
|
||||||
|
fi
|
||||||
|
# Check for decision ID
|
||||||
|
if grep -q "Decision ID" "$contract" && ! grep -q "DECISION-\|CONTRACT-" "$contract"; then
|
||||||
|
echo "Warning: $contract missing decision ID format"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
validate-pack-fixtures:
|
||||||
|
name: Validate Pack Fixtures
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.12'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pip install jsonschema
|
||||||
|
|
||||||
|
- name: Run fixture validation
|
||||||
|
run: |
|
||||||
|
if [ -f .gitea/scripts/test/run-fixtures-check.sh ]; then
|
||||||
|
chmod +x .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
./.gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
fi
|
||||||
|
|
||||||
|
checksum-audit:
|
||||||
|
name: Audit SHA256SUMS Files
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Validate checksums
|
||||||
|
run: |
|
||||||
|
find . -name "SHA256SUMS" -type f | while read f; do
|
||||||
|
dir=$(dirname "$f")
|
||||||
|
echo "Validating checksums in $dir..."
|
||||||
|
cd "$dir"
|
||||||
|
# Check if all referenced files exist
|
||||||
|
while read hash file; do
|
||||||
|
if [ ! -f "$file" ]; then
|
||||||
|
echo "Warning: $file referenced in SHA256SUMS but not found"
|
||||||
|
fi
|
||||||
|
done < SHA256SUMS
|
||||||
|
cd - > /dev/null
|
||||||
|
done
|
||||||
|
|
||||||
|
merkle-consistency:
|
||||||
|
name: Verify Merkle Roots
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check DSSE Merkle roots
|
||||||
|
run: |
|
||||||
|
find . -name "*.dsse.json" -type f | while read f; do
|
||||||
|
echo "Checking Merkle root in $f..."
|
||||||
|
# Extract and validate Merkle root if present
|
||||||
|
if jq -e '.payload' "$f" > /dev/null 2>&1; then
|
||||||
|
PAYLOAD=$(jq -r '.payload' "$f" | base64 -d 2>/dev/null || echo "")
|
||||||
|
if echo "$PAYLOAD" | jq -e '._stellaops.merkleRoot' > /dev/null 2>&1; then
|
||||||
|
MERKLE=$(echo "$PAYLOAD" | jq -r '._stellaops.merkleRoot')
|
||||||
|
echo " Merkle root: $MERKLE"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
74
.gitea/workflows/mirror-sign.yml
Normal file
74
.gitea/workflows/mirror-sign.yml
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
name: Mirror Thin Bundle Sign & Verify
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 6 * * *'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
mirror-sign:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
MIRROR_SIGN_KEY_B64: ${{ secrets.MIRROR_SIGN_KEY_B64 }}
|
||||||
|
REQUIRE_PROD_SIGNING: 1
|
||||||
|
OCI: 1
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Fallback to dev signing key when secret is absent (non-prod only)
|
||||||
|
run: |
|
||||||
|
if [ -z "${MIRROR_SIGN_KEY_B64}" ]; then
|
||||||
|
echo "[warn] MIRROR_SIGN_KEY_B64 not set; using repo dev key for non-production signing."
|
||||||
|
echo "MIRROR_SIGN_KEY_B64=$(base64 -w0 tools/cosign/cosign.dev.key)" >> $GITHUB_ENV
|
||||||
|
echo "REQUIRE_PROD_SIGNING=0" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Verify signing prerequisites
|
||||||
|
run: scripts/mirror/check_signing_prereqs.sh
|
||||||
|
|
||||||
|
- name: Run mirror signing
|
||||||
|
run: |
|
||||||
|
scripts/mirror/ci-sign.sh
|
||||||
|
|
||||||
|
- name: Verify signed bundle
|
||||||
|
run: |
|
||||||
|
scripts/mirror/verify_thin_bundle.py out/mirror/thin/mirror-thin-v1.tar.gz
|
||||||
|
|
||||||
|
- name: Prepare Export Center handoff (metadata + optional schedule)
|
||||||
|
run: |
|
||||||
|
scripts/mirror/export-center-wire.sh
|
||||||
|
env:
|
||||||
|
EXPORT_CENTER_BASE_URL: ${{ secrets.EXPORT_CENTER_BASE_URL }}
|
||||||
|
EXPORT_CENTER_TOKEN: ${{ secrets.EXPORT_CENTER_TOKEN }}
|
||||||
|
EXPORT_CENTER_TENANT: ${{ secrets.EXPORT_CENTER_TENANT }}
|
||||||
|
EXPORT_CENTER_PROJECT: ${{ secrets.EXPORT_CENTER_PROJECT }}
|
||||||
|
EXPORT_CENTER_AUTO_SCHEDULE: ${{ secrets.EXPORT_CENTER_AUTO_SCHEDULE }}
|
||||||
|
|
||||||
|
- name: Upload signed artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: mirror-thin-v1-signed
|
||||||
|
path: |
|
||||||
|
out/mirror/thin/mirror-thin-v1.tar.gz
|
||||||
|
out/mirror/thin/mirror-thin-v1.manifest.json
|
||||||
|
out/mirror/thin/mirror-thin-v1.manifest.dsse.json
|
||||||
|
out/mirror/thin/tuf/
|
||||||
|
out/mirror/thin/oci/
|
||||||
|
out/mirror/thin/milestone.json
|
||||||
|
out/mirror/thin/export-center/export-center-handoff.json
|
||||||
|
out/mirror/thin/export-center/export-center-targets.json
|
||||||
|
out/mirror/thin/export-center/schedule-response.json
|
||||||
|
if-no-files-found: error
|
||||||
|
retention-days: 14
|
||||||
44
.gitea/workflows/mock-dev-release.yml
Normal file
44
.gitea/workflows/mock-dev-release.yml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
name: mock-dev-release
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- devops/releases/2025.09-mock-dev.yaml
|
||||||
|
- devops/downloads/manifest.json
|
||||||
|
- devops/mock-release/**
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
package-mock-release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Package mock dev artefacts
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
mkdir -p out/mock-release
|
||||||
|
cp devops/releases/2025.09-mock-dev.yaml out/mock-release/
|
||||||
|
cp devops/downloads/manifest.json out/mock-release/
|
||||||
|
tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .
|
||||||
|
|
||||||
|
- name: Compose config (dev + mock overlay)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
devops/mock-release/config_check.sh
|
||||||
|
|
||||||
|
- name: Helm template (mock overlay)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
helm template mock ./devops/helm/stellaops -f devops/helm/stellaops/values-mock.yaml > /tmp/helm-mock.yaml
|
||||||
|
ls -lh /tmp/helm-mock.yaml
|
||||||
|
|
||||||
|
- name: Upload mock release bundle
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: mock-dev-release
|
||||||
|
path: |
|
||||||
|
out/mock-release/mock-dev-release.tgz
|
||||||
|
/tmp/compose-mock-config.yaml
|
||||||
|
/tmp/helm-mock.yaml
|
||||||
405
.gitea/workflows/module-publish.yml
Normal file
405
.gitea/workflows/module-publish.yml
Normal file
@@ -0,0 +1,405 @@
|
|||||||
|
# .gitea/workflows/module-publish.yml
|
||||||
|
# Per-module NuGet and container publishing to Gitea registry
|
||||||
|
# Sprint: SPRINT_20251226_004_CICD
|
||||||
|
|
||||||
|
name: Module Publish
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
module:
|
||||||
|
description: 'Module to publish'
|
||||||
|
required: true
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- Authority
|
||||||
|
- Attestor
|
||||||
|
- Concelier
|
||||||
|
- Scanner
|
||||||
|
- Policy
|
||||||
|
- Signer
|
||||||
|
- Excititor
|
||||||
|
- Gateway
|
||||||
|
- Scheduler
|
||||||
|
- Orchestrator
|
||||||
|
- TaskRunner
|
||||||
|
- Notify
|
||||||
|
- CLI
|
||||||
|
version:
|
||||||
|
description: 'Semantic version (e.g., 1.2.3)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
publish_nuget:
|
||||||
|
description: 'Publish NuGet packages'
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
publish_container:
|
||||||
|
description: 'Publish container image'
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
prerelease:
|
||||||
|
description: 'Mark as prerelease'
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- 'module-*-v*' # e.g., module-authority-v1.2.3
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
REGISTRY: git.stella-ops.org
|
||||||
|
NUGET_SOURCE: https://git.stella-ops.org/api/packages/stella-ops.org/nuget/index.json
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# ===========================================================================
|
||||||
|
# PARSE TAG (for tag-triggered builds)
|
||||||
|
# ===========================================================================
|
||||||
|
|
||||||
|
parse-tag:
|
||||||
|
name: Parse Tag
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
if: github.event_name == 'push'
|
||||||
|
outputs:
|
||||||
|
module: ${{ steps.parse.outputs.module }}
|
||||||
|
version: ${{ steps.parse.outputs.version }}
|
||||||
|
steps:
|
||||||
|
- name: Parse module and version from tag
|
||||||
|
id: parse
|
||||||
|
run: |
|
||||||
|
TAG="${{ github.ref_name }}"
|
||||||
|
# Expected format: module-{name}-v{version}
|
||||||
|
# Example: module-authority-v1.2.3
|
||||||
|
if [[ "$TAG" =~ ^module-([a-zA-Z]+)-v([0-9]+\.[0-9]+\.[0-9]+.*)$ ]]; then
|
||||||
|
MODULE="${BASH_REMATCH[1]}"
|
||||||
|
VERSION="${BASH_REMATCH[2]}"
|
||||||
|
# Capitalize first letter
|
||||||
|
MODULE="$(echo "${MODULE:0:1}" | tr '[:lower:]' '[:upper:]')${MODULE:1}"
|
||||||
|
echo "module=$MODULE" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "Parsed: module=$MODULE, version=$VERSION"
|
||||||
|
else
|
||||||
|
echo "::error::Invalid tag format. Expected: module-{name}-v{version}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# VALIDATE
|
||||||
|
# ===========================================================================
|
||||||
|
|
||||||
|
validate:
|
||||||
|
name: Validate Inputs
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [parse-tag]
|
||||||
|
if: always() && (needs.parse-tag.result == 'success' || needs.parse-tag.result == 'skipped')
|
||||||
|
outputs:
|
||||||
|
module: ${{ steps.resolve.outputs.module }}
|
||||||
|
version: ${{ steps.resolve.outputs.version }}
|
||||||
|
publish_nuget: ${{ steps.resolve.outputs.publish_nuget }}
|
||||||
|
publish_container: ${{ steps.resolve.outputs.publish_container }}
|
||||||
|
steps:
|
||||||
|
- name: Resolve inputs
|
||||||
|
id: resolve
|
||||||
|
run: |
|
||||||
|
if [[ "${{ github.event_name }}" == "push" ]]; then
|
||||||
|
MODULE="${{ needs.parse-tag.outputs.module }}"
|
||||||
|
VERSION="${{ needs.parse-tag.outputs.version }}"
|
||||||
|
PUBLISH_NUGET="true"
|
||||||
|
PUBLISH_CONTAINER="true"
|
||||||
|
else
|
||||||
|
MODULE="${{ github.event.inputs.module }}"
|
||||||
|
VERSION="${{ github.event.inputs.version }}"
|
||||||
|
PUBLISH_NUGET="${{ github.event.inputs.publish_nuget }}"
|
||||||
|
PUBLISH_CONTAINER="${{ github.event.inputs.publish_container }}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "module=$MODULE" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "publish_nuget=$PUBLISH_NUGET" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "publish_container=$PUBLISH_CONTAINER" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
echo "=== Resolved Configuration ==="
|
||||||
|
echo "Module: $MODULE"
|
||||||
|
echo "Version: $VERSION"
|
||||||
|
echo "Publish NuGet: $PUBLISH_NUGET"
|
||||||
|
echo "Publish Container: $PUBLISH_CONTAINER"
|
||||||
|
|
||||||
|
- name: Validate version format
|
||||||
|
run: |
|
||||||
|
VERSION="${{ steps.resolve.outputs.version }}"
|
||||||
|
if ! [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.]+)?$ ]]; then
|
||||||
|
echo "::error::Invalid version format. Expected: MAJOR.MINOR.PATCH[-prerelease]"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# PUBLISH NUGET
|
||||||
|
# ===========================================================================
|
||||||
|
|
||||||
|
publish-nuget:
|
||||||
|
name: Publish NuGet
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [validate]
|
||||||
|
if: needs.validate.outputs.publish_nuget == 'true'
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Determine project path
|
||||||
|
id: path
|
||||||
|
run: |
|
||||||
|
MODULE="${{ needs.validate.outputs.module }}"
|
||||||
|
|
||||||
|
# Map module names to project paths
|
||||||
|
case "$MODULE" in
|
||||||
|
Authority)
|
||||||
|
PROJECT="src/Authority/StellaOps.Authority.WebService/StellaOps.Authority.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Attestor)
|
||||||
|
PROJECT="src/Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Concelier)
|
||||||
|
PROJECT="src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Scanner)
|
||||||
|
PROJECT="src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Policy)
|
||||||
|
PROJECT="src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj"
|
||||||
|
;;
|
||||||
|
Signer)
|
||||||
|
PROJECT="src/Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Excititor)
|
||||||
|
PROJECT="src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Gateway)
|
||||||
|
PROJECT="src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Scheduler)
|
||||||
|
PROJECT="src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Orchestrator)
|
||||||
|
PROJECT="src/Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj"
|
||||||
|
;;
|
||||||
|
TaskRunner)
|
||||||
|
PROJECT="src/TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj"
|
||||||
|
;;
|
||||||
|
Notify)
|
||||||
|
PROJECT="src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj"
|
||||||
|
;;
|
||||||
|
CLI)
|
||||||
|
PROJECT="src/Cli/StellaOps.Cli/StellaOps.Cli.csproj"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "::error::Unknown module: $MODULE"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
echo "project=$PROJECT" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "Project path: $PROJECT"
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore ${{ steps.path.outputs.project }}
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
dotnet build ${{ steps.path.outputs.project }} \
|
||||||
|
--configuration Release \
|
||||||
|
--no-restore \
|
||||||
|
-p:Version=${{ needs.validate.outputs.version }}
|
||||||
|
|
||||||
|
- name: Pack NuGet
|
||||||
|
run: |
|
||||||
|
dotnet pack ${{ steps.path.outputs.project }} \
|
||||||
|
--configuration Release \
|
||||||
|
--no-build \
|
||||||
|
-p:Version=${{ needs.validate.outputs.version }} \
|
||||||
|
-p:PackageVersion=${{ needs.validate.outputs.version }} \
|
||||||
|
--output out/packages
|
||||||
|
|
||||||
|
- name: Push to Gitea NuGet registry
|
||||||
|
run: |
|
||||||
|
for nupkg in out/packages/*.nupkg; do
|
||||||
|
echo "Pushing: $nupkg"
|
||||||
|
dotnet nuget push "$nupkg" \
|
||||||
|
--source "${{ env.NUGET_SOURCE }}" \
|
||||||
|
--api-key "${{ secrets.GITEA_TOKEN }}" \
|
||||||
|
--skip-duplicate
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Upload NuGet artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: nuget-${{ needs.validate.outputs.module }}-${{ needs.validate.outputs.version }}
|
||||||
|
path: out/packages/*.nupkg
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# PUBLISH CONTAINER
|
||||||
|
# ===========================================================================
|
||||||
|
|
||||||
|
publish-container:
|
||||||
|
name: Publish Container
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [validate]
|
||||||
|
if: needs.validate.outputs.publish_container == 'true' && needs.validate.outputs.module != 'CLI'
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Log in to Gitea Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY }}
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITEA_TOKEN }}
|
||||||
|
|
||||||
|
- name: Determine image name
|
||||||
|
id: image
|
||||||
|
run: |
|
||||||
|
MODULE="${{ needs.validate.outputs.module }}"
|
||||||
|
VERSION="${{ needs.validate.outputs.version }}"
|
||||||
|
MODULE_LOWER=$(echo "$MODULE" | tr '[:upper:]' '[:lower:]')
|
||||||
|
|
||||||
|
IMAGE="${{ env.REGISTRY }}/stella-ops.org/${MODULE_LOWER}"
|
||||||
|
|
||||||
|
echo "name=$IMAGE" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "tag_version=${IMAGE}:${VERSION}" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "tag_latest=${IMAGE}:latest" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
echo "Image: $IMAGE"
|
||||||
|
echo "Tags: ${VERSION}, latest"
|
||||||
|
|
||||||
|
- name: Build and push container
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: devops/docker/Dockerfile.platform
|
||||||
|
target: ${{ needs.validate.outputs.module | lower }}
|
||||||
|
push: true
|
||||||
|
tags: |
|
||||||
|
${{ steps.image.outputs.tag_version }}
|
||||||
|
${{ steps.image.outputs.tag_latest }}
|
||||||
|
cache-from: type=gha
|
||||||
|
cache-to: type=gha,mode=max
|
||||||
|
labels: |
|
||||||
|
org.opencontainers.image.title=StellaOps ${{ needs.validate.outputs.module }}
|
||||||
|
org.opencontainers.image.version=${{ needs.validate.outputs.version }}
|
||||||
|
org.opencontainers.image.source=https://git.stella-ops.org/stella-ops.org/git.stella-ops.org
|
||||||
|
org.opencontainers.image.revision=${{ github.sha }}
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# PUBLISH CLI BINARIES (multi-platform)
|
||||||
|
# ===========================================================================
|
||||||
|
|
||||||
|
publish-cli:
|
||||||
|
name: Publish CLI (${{ matrix.runtime }})
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [validate]
|
||||||
|
if: needs.validate.outputs.module == 'CLI'
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
runtime:
|
||||||
|
- linux-x64
|
||||||
|
- linux-arm64
|
||||||
|
- win-x64
|
||||||
|
- osx-x64
|
||||||
|
- osx-arm64
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Install cross-compilation tools
|
||||||
|
if: matrix.runtime == 'linux-arm64'
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y --no-install-recommends binutils-aarch64-linux-gnu
|
||||||
|
|
||||||
|
- name: Publish CLI
|
||||||
|
run: |
|
||||||
|
dotnet publish src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
|
||||||
|
--configuration Release \
|
||||||
|
--runtime ${{ matrix.runtime }} \
|
||||||
|
--self-contained true \
|
||||||
|
-p:Version=${{ needs.validate.outputs.version }} \
|
||||||
|
-p:PublishSingleFile=true \
|
||||||
|
-p:PublishTrimmed=true \
|
||||||
|
-p:EnableCompressionInSingleFile=true \
|
||||||
|
--output out/cli/${{ matrix.runtime }}
|
||||||
|
|
||||||
|
- name: Create archive
|
||||||
|
run: |
|
||||||
|
VERSION="${{ needs.validate.outputs.version }}"
|
||||||
|
RUNTIME="${{ matrix.runtime }}"
|
||||||
|
|
||||||
|
cd out/cli/$RUNTIME
|
||||||
|
if [[ "$RUNTIME" == win-* ]]; then
|
||||||
|
zip -r ../stellaops-cli-${VERSION}-${RUNTIME}.zip .
|
||||||
|
else
|
||||||
|
tar -czvf ../stellaops-cli-${VERSION}-${RUNTIME}.tar.gz .
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload CLI artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: cli-${{ needs.validate.outputs.version }}-${{ matrix.runtime }}
|
||||||
|
path: |
|
||||||
|
out/cli/*.zip
|
||||||
|
out/cli/*.tar.gz
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# SUMMARY
|
||||||
|
# ===========================================================================
|
||||||
|
|
||||||
|
summary:
|
||||||
|
name: Publish Summary
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [validate, publish-nuget, publish-container, publish-cli]
|
||||||
|
if: always()
|
||||||
|
steps:
|
||||||
|
- name: Generate Summary
|
||||||
|
run: |
|
||||||
|
echo "## Module Publish Summary" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Property | Value |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Module | ${{ needs.validate.outputs.module }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Version | ${{ needs.validate.outputs.version }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| NuGet | ${{ needs.publish-nuget.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Container | ${{ needs.publish-container.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| CLI | ${{ needs.publish-cli.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "### Registry URLs" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- NuGet: \`${{ env.NUGET_SOURCE }}\`" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Container: \`${{ env.REGISTRY }}/stella-ops.org/${{ needs.validate.outputs.module | lower }}\`" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
- name: Check for failures
|
||||||
|
if: contains(needs.*.result, 'failure')
|
||||||
|
run: |
|
||||||
|
echo "::error::One or more publish jobs failed"
|
||||||
|
exit 1
|
||||||
102
.gitea/workflows/notify-smoke-test.yml
Normal file
102
.gitea/workflows/notify-smoke-test.yml
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
name: Notify Smoke Test
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/Notify/**'
|
||||||
|
- 'src/Notifier/**'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/Notify/**'
|
||||||
|
- 'src/Notifier/**'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.x'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
unit-tests:
|
||||||
|
name: Notify Unit Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/Notify/
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/Notify/ --no-restore
|
||||||
|
|
||||||
|
- name: Run tests
|
||||||
|
run: dotnet test src/Notify/ --no-build --verbosity normal
|
||||||
|
|
||||||
|
notifier-tests:
|
||||||
|
name: Notifier Service Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/Notifier/
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/Notifier/ --no-restore
|
||||||
|
|
||||||
|
- name: Run tests
|
||||||
|
run: dotnet test src/Notifier/ --no-build --verbosity normal
|
||||||
|
|
||||||
|
smoke-test:
|
||||||
|
name: Notification Smoke Test
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [unit-tests, notifier-tests]
|
||||||
|
services:
|
||||||
|
mongodb:
|
||||||
|
image: mongo:7.0
|
||||||
|
ports:
|
||||||
|
- 27017:27017
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Build Notifier
|
||||||
|
run: dotnet build src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/
|
||||||
|
|
||||||
|
- name: Start service
|
||||||
|
run: |
|
||||||
|
dotnet run --project src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/ &
|
||||||
|
sleep 10
|
||||||
|
|
||||||
|
- name: Health check
|
||||||
|
run: |
|
||||||
|
for i in {1..30}; do
|
||||||
|
if curl -s http://localhost:5000/health > /dev/null; then
|
||||||
|
echo "Service is healthy"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
echo "Service failed to start"
|
||||||
|
exit 1
|
||||||
|
|
||||||
|
- name: Test notification endpoint
|
||||||
|
run: |
|
||||||
|
# Test dry-run notification
|
||||||
|
curl -X POST http://localhost:5000/api/v1/notifications/test \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{"channel": "log", "message": "Smoke test", "dryRun": true}' \
|
||||||
|
|| echo "Warning: Notification test endpoint not available"
|
||||||
59
.gitea/workflows/oas-ci.yml
Normal file
59
.gitea/workflows/oas-ci.yml
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
name: oas-ci
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- "src/Api/**"
|
||||||
|
- "scripts/api-*.mjs"
|
||||||
|
- "package.json"
|
||||||
|
- "package-lock.json"
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- "src/Api/**"
|
||||||
|
- "scripts/api-*.mjs"
|
||||||
|
- "package.json"
|
||||||
|
- "package-lock.json"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
oas-validate:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: "18"
|
||||||
|
|
||||||
|
- name: Install deps
|
||||||
|
run: npm install --ignore-scripts --no-progress
|
||||||
|
|
||||||
|
- name: Compose aggregate OpenAPI
|
||||||
|
run: npm run api:compose
|
||||||
|
|
||||||
|
- name: Lint (spectral)
|
||||||
|
run: npm run api:lint
|
||||||
|
|
||||||
|
- name: Validate examples coverage
|
||||||
|
run: npm run api:examples
|
||||||
|
|
||||||
|
- name: Compat diff (previous commit)
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
if git show HEAD~1:src/Api/StellaOps.Api.OpenApi/stella.yaml > /tmp/stella-prev.yaml 2>/dev/null; then
|
||||||
|
node scripts/api-compat-diff.mjs /tmp/stella-prev.yaml src/Api/StellaOps.Api.OpenApi/stella.yaml --output text --fail-on-breaking
|
||||||
|
else
|
||||||
|
echo "[oas-ci] previous stella.yaml not found; skipping"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Contract tests
|
||||||
|
run: npm run api:compat:test
|
||||||
|
|
||||||
|
- name: Upload aggregate spec
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: stella-openapi
|
||||||
|
path: src/Api/StellaOps.Api.OpenApi/stella.yaml
|
||||||
46
.gitea/workflows/obs-slo.yml
Normal file
46
.gitea/workflows/obs-slo.yml
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
name: obs-slo
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
prom_url:
|
||||||
|
description: "Prometheus base URL"
|
||||||
|
required: true
|
||||||
|
default: "http://localhost:9090"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
slo-eval:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup Python (telemetry schema checks)
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.12'
|
||||||
|
|
||||||
|
- name: Install telemetry schema deps
|
||||||
|
run: python -m pip install --upgrade pip jsonschema
|
||||||
|
|
||||||
|
- name: Run SLO evaluator
|
||||||
|
env:
|
||||||
|
PROM_URL: ${{ github.event.inputs.prom_url }}
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/observability/slo-evaluator.sh
|
||||||
|
scripts/observability/slo-evaluator.sh
|
||||||
|
|
||||||
|
- name: Telemetry schema/bundle checks
|
||||||
|
env:
|
||||||
|
TELEMETRY_BUNDLE_SCHEMA: docs/modules/telemetry/schemas/telemetry-bundle.schema.json
|
||||||
|
run: |
|
||||||
|
chmod +x devops/telemetry/tests/ci-run.sh
|
||||||
|
devops/telemetry/tests/ci-run.sh
|
||||||
|
|
||||||
|
- name: Upload SLO results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: obs-slo
|
||||||
|
path: out/obs-slo/**
|
||||||
37
.gitea/workflows/obs-stream.yml
Normal file
37
.gitea/workflows/obs-stream.yml
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
name: obs-stream
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
nats_url:
|
||||||
|
description: "NATS server URL"
|
||||||
|
required: false
|
||||||
|
default: "nats://localhost:4222"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
stream-validate:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Install nats CLI
|
||||||
|
run: |
|
||||||
|
curl -sSL https://github.com/nats-io/natscli/releases/download/v0.1.4/nats-0.1.4-linux-amd64.tar.gz -o /tmp/natscli.tgz
|
||||||
|
tar -C /tmp -xzf /tmp/natscli.tgz
|
||||||
|
sudo mv /tmp/nats /usr/local/bin/nats
|
||||||
|
|
||||||
|
- name: Validate streaming knobs
|
||||||
|
env:
|
||||||
|
NATS_URL: ${{ github.event.inputs.nats_url }}
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/observability/streaming-validate.sh
|
||||||
|
scripts/observability/streaming-validate.sh
|
||||||
|
|
||||||
|
- name: Upload stream validation
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: obs-stream
|
||||||
|
path: out/obs-stream/**
|
||||||
121
.gitea/workflows/offline-e2e.yml
Normal file
121
.gitea/workflows/offline-e2e.yml
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
name: Offline E2E Tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/AirGap/**'
|
||||||
|
- 'src/Scanner/**'
|
||||||
|
- 'src/__Tests/offline/**'
|
||||||
|
schedule:
|
||||||
|
- cron: '0 4 * * *' # Nightly at 4 AM UTC
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
STELLAOPS_OFFLINE_MODE: 'true'
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
offline-e2e:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Cache NuGet packages
|
||||||
|
uses: actions/cache@v3
|
||||||
|
with:
|
||||||
|
path: ~/.nuget/packages
|
||||||
|
key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-nuget-
|
||||||
|
|
||||||
|
- name: Download offline bundle
|
||||||
|
run: |
|
||||||
|
# In real scenario, bundle would be pre-built and cached
|
||||||
|
# For now, create minimal fixture structure
|
||||||
|
mkdir -p ./offline-bundle/{images,feeds,policies,keys,certs,vex}
|
||||||
|
echo '{}' > ./offline-bundle/manifest.json
|
||||||
|
|
||||||
|
- name: Build in isolated environment
|
||||||
|
run: |
|
||||||
|
# Build offline test library
|
||||||
|
dotnet build src/__Libraries/StellaOps.Testing.AirGap/StellaOps.Testing.AirGap.csproj
|
||||||
|
|
||||||
|
# Build offline E2E tests
|
||||||
|
dotnet build src/__Tests/offline/StellaOps.Offline.E2E.Tests/StellaOps.Offline.E2E.Tests.csproj
|
||||||
|
|
||||||
|
- name: Run offline E2E tests with network isolation
|
||||||
|
run: |
|
||||||
|
# Set offline bundle path
|
||||||
|
export STELLAOPS_OFFLINE_BUNDLE=$(pwd)/offline-bundle
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
dotnet test src/__Tests/offline/StellaOps.Offline.E2E.Tests \
|
||||||
|
--logger "trx;LogFileName=offline-e2e.trx" \
|
||||||
|
--logger "console;verbosity=detailed" \
|
||||||
|
--results-directory ./results
|
||||||
|
|
||||||
|
- name: Verify no network calls
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
# Parse test output for any NetworkIsolationViolationException
|
||||||
|
if [ -f "./results/offline-e2e.trx" ]; then
|
||||||
|
if grep -q "NetworkIsolationViolation" ./results/offline-e2e.trx; then
|
||||||
|
echo "::error::Tests attempted network calls in offline mode!"
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
echo "✅ No network isolation violations detected"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload results
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: offline-e2e-results
|
||||||
|
path: ./results/
|
||||||
|
|
||||||
|
verify-isolation:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: offline-e2e
|
||||||
|
if: always()
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Download results
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: offline-e2e-results
|
||||||
|
path: ./results
|
||||||
|
|
||||||
|
- name: Generate summary
|
||||||
|
run: |
|
||||||
|
echo "## Offline E2E Test Summary" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
if [ -f "./results/offline-e2e.trx" ]; then
|
||||||
|
# Parse test results
|
||||||
|
TOTAL=$(grep -o 'total="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")
|
||||||
|
PASSED=$(grep -o 'passed="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")
|
||||||
|
FAILED=$(grep -o 'failed="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")
|
||||||
|
|
||||||
|
echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Total Tests | ${TOTAL} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Passed | ${PASSED} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Failed | ${FAILED} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
if grep -q "NetworkIsolationViolation" ./results/offline-e2e.trx; then
|
||||||
|
echo "❌ **Network isolation was violated**" >> $GITHUB_STEP_SUMMARY
|
||||||
|
else
|
||||||
|
echo "✅ **Network isolation verified - no egress detected**" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "⚠️ No test results found" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
186
.gitea/workflows/parity-tests.yml
Normal file
186
.gitea/workflows/parity-tests.yml
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
name: Parity Tests
|
||||||
|
|
||||||
|
# Parity testing workflow: compares StellaOps against competitor scanners
|
||||||
|
# (Syft, Grype, Trivy) on a standardized fixture set.
|
||||||
|
#
|
||||||
|
# Schedule: Nightly at 02:00 UTC; Weekly full run on Sunday 00:00 UTC
|
||||||
|
# NOT a PR gate - too slow and has external dependencies
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
# Nightly at 02:00 UTC (quick fixture set)
|
||||||
|
- cron: '0 2 * * *'
|
||||||
|
# Weekly on Sunday at 00:00 UTC (full fixture set)
|
||||||
|
- cron: '0 0 * * 0'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
fixture_set:
|
||||||
|
description: 'Fixture set to use'
|
||||||
|
required: false
|
||||||
|
default: 'quick'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- quick
|
||||||
|
- full
|
||||||
|
enable_drift_detection:
|
||||||
|
description: 'Enable drift detection analysis'
|
||||||
|
required: false
|
||||||
|
default: 'true'
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.x'
|
||||||
|
SYFT_VERSION: '1.9.0'
|
||||||
|
GRYPE_VERSION: '0.79.3'
|
||||||
|
TRIVY_VERSION: '0.54.1'
|
||||||
|
PARITY_RESULTS_PATH: 'bench/results/parity'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
parity-tests:
|
||||||
|
name: Competitor Parity Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 120
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Install Syft
|
||||||
|
run: |
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.SYFT_VERSION }}
|
||||||
|
syft version
|
||||||
|
|
||||||
|
- name: Install Grype
|
||||||
|
run: |
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.GRYPE_VERSION }}
|
||||||
|
grype version
|
||||||
|
|
||||||
|
- name: Install Trivy
|
||||||
|
run: |
|
||||||
|
curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v${{ env.TRIVY_VERSION }}
|
||||||
|
trivy --version
|
||||||
|
|
||||||
|
- name: Determine fixture set
|
||||||
|
id: fixtures
|
||||||
|
run: |
|
||||||
|
# Weekly runs use full fixture set
|
||||||
|
if [[ "${{ github.event.schedule }}" == "0 0 * * 0" ]]; then
|
||||||
|
echo "fixture_set=full" >> $GITHUB_OUTPUT
|
||||||
|
elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||||
|
echo "fixture_set=${{ inputs.fixture_set }}" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "fixture_set=quick" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Build parity tests
|
||||||
|
run: |
|
||||||
|
dotnet build src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj -c Release
|
||||||
|
|
||||||
|
- name: Run parity tests
|
||||||
|
id: parity
|
||||||
|
run: |
|
||||||
|
mkdir -p ${{ env.PARITY_RESULTS_PATH }}
|
||||||
|
RUN_ID=$(date -u +%Y%m%dT%H%M%SZ)
|
||||||
|
echo "run_id=${RUN_ID}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
dotnet test src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj \
|
||||||
|
-c Release \
|
||||||
|
--no-build \
|
||||||
|
--logger "trx;LogFileName=parity-results.trx" \
|
||||||
|
--results-directory ${{ env.PARITY_RESULTS_PATH }} \
|
||||||
|
-e PARITY_FIXTURE_SET=${{ steps.fixtures.outputs.fixture_set }} \
|
||||||
|
-e PARITY_RUN_ID=${RUN_ID} \
|
||||||
|
-e PARITY_OUTPUT_PATH=${{ env.PARITY_RESULTS_PATH }} \
|
||||||
|
|| true # Don't fail workflow on test failures
|
||||||
|
|
||||||
|
- name: Store parity results
|
||||||
|
run: |
|
||||||
|
# Copy JSON results to time-series storage
|
||||||
|
if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json" ]; then
|
||||||
|
echo "Parity results stored successfully"
|
||||||
|
cat ${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json | jq .
|
||||||
|
else
|
||||||
|
echo "Warning: No parity results file found"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Run drift detection
|
||||||
|
if: ${{ github.event_name != 'workflow_dispatch' || inputs.enable_drift_detection == 'true' }}
|
||||||
|
run: |
|
||||||
|
# Analyze drift from historical results
|
||||||
|
dotnet run --project src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj \
|
||||||
|
--no-build \
|
||||||
|
-- analyze-drift \
|
||||||
|
--results-path ${{ env.PARITY_RESULTS_PATH }} \
|
||||||
|
--threshold 0.05 \
|
||||||
|
--trend-days 3 \
|
||||||
|
|| true
|
||||||
|
|
||||||
|
- name: Upload parity results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: parity-results-${{ steps.parity.outputs.run_id }}
|
||||||
|
path: ${{ env.PARITY_RESULTS_PATH }}
|
||||||
|
retention-days: 90
|
||||||
|
|
||||||
|
- name: Export Prometheus metrics
|
||||||
|
if: ${{ env.PROMETHEUS_PUSH_GATEWAY != '' }}
|
||||||
|
env:
|
||||||
|
PROMETHEUS_PUSH_GATEWAY: ${{ secrets.PROMETHEUS_PUSH_GATEWAY }}
|
||||||
|
run: |
|
||||||
|
# Push metrics to Prometheus Push Gateway if configured
|
||||||
|
if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-metrics.txt" ]; then
|
||||||
|
curl -X POST \
|
||||||
|
-H "Content-Type: text/plain" \
|
||||||
|
--data-binary @${{ env.PARITY_RESULTS_PATH }}/parity-metrics.txt \
|
||||||
|
"${PROMETHEUS_PUSH_GATEWAY}/metrics/job/parity_tests/instance/${{ steps.parity.outputs.run_id }}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Generate comparison report
|
||||||
|
run: |
|
||||||
|
echo "## Parity Test Results - ${{ steps.parity.outputs.run_id }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Fixture Set:** ${{ steps.fixtures.outputs.fixture_set }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Competitor Versions:**" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Syft: ${{ env.SYFT_VERSION }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Grype: ${{ env.GRYPE_VERSION }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Trivy: ${{ env.TRIVY_VERSION }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json" ]; then
|
||||||
|
echo "### Metrics Summary" >> $GITHUB_STEP_SUMMARY
|
||||||
|
jq -r '
|
||||||
|
"| Metric | StellaOps | Grype | Trivy |",
|
||||||
|
"|--------|-----------|-------|-------|",
|
||||||
|
"| SBOM Packages | \(.sbomMetrics.stellaOpsPackageCount) | \(.sbomMetrics.syftPackageCount) | - |",
|
||||||
|
"| Vulnerability Recall | \(.vulnMetrics.recall | . * 100 | round / 100)% | - | - |",
|
||||||
|
"| Vulnerability F1 | \(.vulnMetrics.f1Score | . * 100 | round / 100)% | - | - |",
|
||||||
|
"| Latency P95 (ms) | \(.latencyMetrics.stellaOpsP95Ms | round) | \(.latencyMetrics.grypeP95Ms | round) | \(.latencyMetrics.trivyP95Ms | round) |"
|
||||||
|
' ${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json >> $GITHUB_STEP_SUMMARY || echo "Could not parse results" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Alert on critical drift
|
||||||
|
if: failure()
|
||||||
|
uses: slackapi/slack-github-action@v1.25.0
|
||||||
|
with:
|
||||||
|
payload: |
|
||||||
|
{
|
||||||
|
"text": "⚠️ Parity test drift detected",
|
||||||
|
"blocks": [
|
||||||
|
{
|
||||||
|
"type": "section",
|
||||||
|
"text": {
|
||||||
|
"type": "mrkdwn",
|
||||||
|
"text": "*Parity Test Alert*\nDrift detected in competitor comparison metrics.\n<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Results>"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
env:
|
||||||
|
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||||
|
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
|
||||||
|
continue-on-error: true
|
||||||
70
.gitea/workflows/policy-lint.yml
Normal file
70
.gitea/workflows/policy-lint.yml
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
name: Policy Lint & Smoke
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'docs/policy/**'
|
||||||
|
- 'docs/examples/policies/**'
|
||||||
|
- 'src/Cli/**'
|
||||||
|
- '.gitea/workflows/policy-lint.yml'
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'docs/policy/**'
|
||||||
|
- 'docs/examples/policies/**'
|
||||||
|
- 'src/Cli/**'
|
||||||
|
- '.gitea/workflows/policy-lint.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
policy-lint:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
|
||||||
|
TZ: UTC
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET 10 RC
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Cache NuGet packages
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.nuget/packages
|
||||||
|
.nuget/packages
|
||||||
|
key: policy-lint-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}
|
||||||
|
|
||||||
|
- name: Restore CLI
|
||||||
|
run: |
|
||||||
|
dotnet restore src/Cli/StellaOps.Cli/StellaOps.Cli.csproj --configfile nuget.config
|
||||||
|
|
||||||
|
- name: Lint policies (deterministic)
|
||||||
|
run: |
|
||||||
|
mkdir -p out/policy-lint
|
||||||
|
dotnet run --project src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -- \
|
||||||
|
policy lint docs/examples/policies/*.stella \
|
||||||
|
--format json --no-color \
|
||||||
|
> out/policy-lint/lint.json
|
||||||
|
|
||||||
|
- name: Smoke simulate entrypoint
|
||||||
|
run: |
|
||||||
|
dotnet run --project src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -- policy simulate --help > out/policy-lint/simulate-help.txt
|
||||||
|
|
||||||
|
- name: Upload lint artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: policy-lint
|
||||||
|
path: out/policy-lint
|
||||||
|
retention-days: 7
|
||||||
89
.gitea/workflows/policy-simulate.yml
Normal file
89
.gitea/workflows/policy-simulate.yml
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
name: Policy Simulation
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'docs/policy/**'
|
||||||
|
- 'docs/examples/policies/**'
|
||||||
|
- 'scripts/policy/**'
|
||||||
|
- '.gitea/workflows/policy-simulate.yml'
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'docs/policy/**'
|
||||||
|
- 'docs/examples/policies/**'
|
||||||
|
- 'scripts/policy/**'
|
||||||
|
- '.gitea/workflows/policy-simulate.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
policy-simulate:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
|
||||||
|
TZ: UTC
|
||||||
|
THRESHOLD: 0
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET 10 RC
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Install Cosign
|
||||||
|
uses: sigstore/cosign-installer@v3.4.0
|
||||||
|
|
||||||
|
- name: Cache NuGet packages
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.nuget/packages
|
||||||
|
.nuget/packages
|
||||||
|
key: policy-sim-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}
|
||||||
|
|
||||||
|
- name: Restore CLI
|
||||||
|
run: |
|
||||||
|
dotnet restore src/Cli/StellaOps.Cli/StellaOps.Cli.csproj --configfile nuget.config
|
||||||
|
|
||||||
|
- name: Generate policy signing key (ephemeral)
|
||||||
|
run: |
|
||||||
|
OUT_DIR=out/policy-sign/keys PREFIX=ci-policy COSIGN_PASSWORD= scripts/policy/rotate-key.sh
|
||||||
|
|
||||||
|
- name: Sign sample policy blob
|
||||||
|
run: |
|
||||||
|
export COSIGN_KEY_B64=$(base64 -w0 out/policy-sign/keys/ci-policy-cosign.key)
|
||||||
|
COSIGN_PASSWORD= \
|
||||||
|
.gitea/scripts/sign/sign-policy.sh --file docs/examples/policies/baseline.stella --out-dir out/policy-sign
|
||||||
|
|
||||||
|
- name: Attest and verify sample policy blob
|
||||||
|
run: |
|
||||||
|
export COSIGN_KEY_B64=$(base64 -w0 out/policy-sign/keys/ci-policy-cosign.key)
|
||||||
|
COSIGN_PASSWORD= \
|
||||||
|
scripts/policy/attest-verify.sh --file docs/examples/policies/baseline.stella --out-dir out/policy-sign
|
||||||
|
|
||||||
|
- name: Run batch policy simulation
|
||||||
|
run: |
|
||||||
|
scripts/policy/batch-simulate.sh
|
||||||
|
|
||||||
|
- name: Upload simulation artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: policy-simulation
|
||||||
|
path: out/policy-sim
|
||||||
|
retention-days: 7
|
||||||
|
|
||||||
|
- name: Upload signing artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: policy-signing
|
||||||
|
path: out/policy-sign
|
||||||
|
retention-days: 7
|
||||||
@@ -1,206 +1,209 @@
|
|||||||
# .gitea/workflows/promote.yml
|
# .gitea/workflows/promote.yml
|
||||||
# Manual promotion workflow to copy staged artefacts to production
|
# Manual promotion workflow to copy staged artefacts to production
|
||||||
|
|
||||||
name: Promote Feedser (Manual)
|
name: Promote Feedser (Manual)
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
include_docs:
|
include_docs:
|
||||||
description: 'Also promote the generated documentation bundle'
|
description: 'Also promote the generated documentation bundle'
|
||||||
required: false
|
required: false
|
||||||
default: 'true'
|
default: 'true'
|
||||||
type: boolean
|
type: boolean
|
||||||
tag:
|
tag:
|
||||||
description: 'Optional build identifier to record in the summary'
|
description: 'Optional build identifier to record in the summary'
|
||||||
required: false
|
required: false
|
||||||
default: 'latest'
|
default: 'latest'
|
||||||
type: string
|
type: string
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
promote:
|
promote:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
environment: production
|
environment: production
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Resolve staging credentials
|
- name: Task Pack offline bundle fixtures
|
||||||
id: staging
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
run: |
|
|
||||||
missing=()
|
- name: Resolve staging credentials
|
||||||
|
id: staging
|
||||||
host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
|
run: |
|
||||||
if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
|
missing=()
|
||||||
if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
|
|
||||||
if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
|
host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
|
||||||
if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi
|
if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
|
||||||
|
if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
|
||||||
user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
|
if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
|
||||||
if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
|
if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi
|
||||||
if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
|
|
||||||
if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
|
user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
|
||||||
if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi
|
if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
|
||||||
|
if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
|
||||||
path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
|
if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
|
||||||
if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi
|
if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi
|
||||||
if [ -z "$path" ]; then missing+=("STAGING_DEPLOYMENT_PATH")
|
|
||||||
fi
|
path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
|
||||||
|
if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi
|
||||||
docs_path="${{ secrets.STAGING_DOCS_PATH }}"
|
if [ -z "$path" ]; then missing+=("STAGING_DEPLOYMENT_PATH")
|
||||||
if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi
|
fi
|
||||||
|
|
||||||
key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
|
docs_path="${{ secrets.STAGING_DOCS_PATH }}"
|
||||||
if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
|
if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi
|
||||||
if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
|
|
||||||
if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
|
key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
|
||||||
if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi
|
if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
|
||||||
|
if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
|
||||||
if [ ${#missing[@]} -gt 0 ]; then
|
if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
|
||||||
echo "❌ Missing staging configuration: ${missing[*]}"
|
if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi
|
||||||
exit 1
|
|
||||||
fi
|
if [ ${#missing[@]} -gt 0 ]; then
|
||||||
|
echo "❌ Missing staging configuration: ${missing[*]}"
|
||||||
key_file="$RUNNER_TEMP/staging_key"
|
exit 1
|
||||||
printf '%s\n' "$key" > "$key_file"
|
fi
|
||||||
chmod 600 "$key_file"
|
|
||||||
|
key_file="$RUNNER_TEMP/staging_key"
|
||||||
echo "host=$host" >> $GITHUB_OUTPUT
|
printf '%s\n' "$key" > "$key_file"
|
||||||
echo "user=$user" >> $GITHUB_OUTPUT
|
chmod 600 "$key_file"
|
||||||
echo "path=$path" >> $GITHUB_OUTPUT
|
|
||||||
echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
|
echo "host=$host" >> $GITHUB_OUTPUT
|
||||||
echo "key-file=$key_file" >> $GITHUB_OUTPUT
|
echo "user=$user" >> $GITHUB_OUTPUT
|
||||||
|
echo "path=$path" >> $GITHUB_OUTPUT
|
||||||
- name: Resolve production credentials
|
echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
|
||||||
id: production
|
echo "key-file=$key_file" >> $GITHUB_OUTPUT
|
||||||
run: |
|
|
||||||
missing=()
|
- name: Resolve production credentials
|
||||||
|
id: production
|
||||||
host="${{ secrets.PRODUCTION_DEPLOYMENT_HOST }}"
|
run: |
|
||||||
if [ -z "$host" ]; then host="${{ vars.PRODUCTION_DEPLOYMENT_HOST }}"; fi
|
missing=()
|
||||||
if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
|
|
||||||
if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
|
host="${{ secrets.PRODUCTION_DEPLOYMENT_HOST }}"
|
||||||
if [ -z "$host" ]; then missing+=("PRODUCTION_DEPLOYMENT_HOST"); fi
|
if [ -z "$host" ]; then host="${{ vars.PRODUCTION_DEPLOYMENT_HOST }}"; fi
|
||||||
|
if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
|
||||||
user="${{ secrets.PRODUCTION_DEPLOYMENT_USERNAME }}"
|
if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
|
||||||
if [ -z "$user" ]; then user="${{ vars.PRODUCTION_DEPLOYMENT_USERNAME }}"; fi
|
if [ -z "$host" ]; then missing+=("PRODUCTION_DEPLOYMENT_HOST"); fi
|
||||||
if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
|
|
||||||
if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
|
user="${{ secrets.PRODUCTION_DEPLOYMENT_USERNAME }}"
|
||||||
if [ -z "$user" ]; then missing+=("PRODUCTION_DEPLOYMENT_USERNAME"); fi
|
if [ -z "$user" ]; then user="${{ vars.PRODUCTION_DEPLOYMENT_USERNAME }}"; fi
|
||||||
|
if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
|
||||||
path="${{ secrets.PRODUCTION_DEPLOYMENT_PATH }}"
|
if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
|
||||||
if [ -z "$path" ]; then path="${{ vars.PRODUCTION_DEPLOYMENT_PATH }}"; fi
|
if [ -z "$user" ]; then missing+=("PRODUCTION_DEPLOYMENT_USERNAME"); fi
|
||||||
if [ -z "$path" ]; then missing+=("PRODUCTION_DEPLOYMENT_PATH")
|
|
||||||
fi
|
path="${{ secrets.PRODUCTION_DEPLOYMENT_PATH }}"
|
||||||
|
if [ -z "$path" ]; then path="${{ vars.PRODUCTION_DEPLOYMENT_PATH }}"; fi
|
||||||
docs_path="${{ secrets.PRODUCTION_DOCS_PATH }}"
|
if [ -z "$path" ]; then missing+=("PRODUCTION_DEPLOYMENT_PATH")
|
||||||
if [ -z "$docs_path" ]; then docs_path="${{ vars.PRODUCTION_DOCS_PATH }}"; fi
|
fi
|
||||||
|
|
||||||
key="${{ secrets.PRODUCTION_DEPLOYMENT_KEY }}"
|
docs_path="${{ secrets.PRODUCTION_DOCS_PATH }}"
|
||||||
if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
|
if [ -z "$docs_path" ]; then docs_path="${{ vars.PRODUCTION_DOCS_PATH }}"; fi
|
||||||
if [ -z "$key" ]; then key="${{ vars.PRODUCTION_DEPLOYMENT_KEY }}"; fi
|
|
||||||
if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
|
key="${{ secrets.PRODUCTION_DEPLOYMENT_KEY }}"
|
||||||
if [ -z "$key" ]; then missing+=("PRODUCTION_DEPLOYMENT_KEY"); fi
|
if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
|
||||||
|
if [ -z "$key" ]; then key="${{ vars.PRODUCTION_DEPLOYMENT_KEY }}"; fi
|
||||||
if [ ${#missing[@]} -gt 0 ]; then
|
if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
|
||||||
echo "❌ Missing production configuration: ${missing[*]}"
|
if [ -z "$key" ]; then missing+=("PRODUCTION_DEPLOYMENT_KEY"); fi
|
||||||
exit 1
|
|
||||||
fi
|
if [ ${#missing[@]} -gt 0 ]; then
|
||||||
|
echo "❌ Missing production configuration: ${missing[*]}"
|
||||||
key_file="$RUNNER_TEMP/production_key"
|
exit 1
|
||||||
printf '%s\n' "$key" > "$key_file"
|
fi
|
||||||
chmod 600 "$key_file"
|
|
||||||
|
key_file="$RUNNER_TEMP/production_key"
|
||||||
echo "host=$host" >> $GITHUB_OUTPUT
|
printf '%s\n' "$key" > "$key_file"
|
||||||
echo "user=$user" >> $GITHUB_OUTPUT
|
chmod 600 "$key_file"
|
||||||
echo "path=$path" >> $GITHUB_OUTPUT
|
|
||||||
echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
|
echo "host=$host" >> $GITHUB_OUTPUT
|
||||||
echo "key-file=$key_file" >> $GITHUB_OUTPUT
|
echo "user=$user" >> $GITHUB_OUTPUT
|
||||||
|
echo "path=$path" >> $GITHUB_OUTPUT
|
||||||
- name: Install rsync
|
echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
|
||||||
run: |
|
echo "key-file=$key_file" >> $GITHUB_OUTPUT
|
||||||
if command -v rsync >/dev/null 2>&1; then
|
|
||||||
exit 0
|
- name: Install rsync
|
||||||
fi
|
run: |
|
||||||
CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
|
if command -v rsync >/dev/null 2>&1; then
|
||||||
mkdir -p "$CACHE_DIR"
|
exit 0
|
||||||
KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
|
fi
|
||||||
DEB_DIR="$CACHE_DIR/$KEY"
|
CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
|
||||||
mkdir -p "$DEB_DIR"
|
mkdir -p "$CACHE_DIR"
|
||||||
if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
|
KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
|
||||||
apt-get update
|
DEB_DIR="$CACHE_DIR/$KEY"
|
||||||
apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
|
mkdir -p "$DEB_DIR"
|
||||||
else
|
if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
|
||||||
apt-get update
|
apt-get update
|
||||||
apt-get download rsync libpopt0
|
apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
|
||||||
mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
|
else
|
||||||
dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
|
apt-get update
|
||||||
fi
|
apt-get download rsync libpopt0
|
||||||
|
mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
|
||||||
- name: Fetch staging artefacts
|
dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
|
||||||
id: fetch
|
fi
|
||||||
run: |
|
|
||||||
staging_root="${{ runner.temp }}/staging"
|
- name: Fetch staging artefacts
|
||||||
mkdir -p "$staging_root/service" "$staging_root/docs"
|
id: fetch
|
||||||
|
run: |
|
||||||
echo "📥 Copying service bundle from staging"
|
staging_root="${{ runner.temp }}/staging"
|
||||||
rsync -az --delete \
|
mkdir -p "$staging_root/service" "$staging_root/docs"
|
||||||
-e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
|
||||||
"${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs.path }}/" \
|
echo "📥 Copying service bundle from staging"
|
||||||
"$staging_root/service/"
|
rsync -az --delete \
|
||||||
|
-e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
||||||
if [ "${{ github.event.inputs.include_docs }}" = "true" ] && [ -n "${{ steps.staging.outputs['docs-path'] }}" ]; then
|
"${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs.path }}/" \
|
||||||
echo "📥 Copying documentation bundle from staging"
|
"$staging_root/service/"
|
||||||
rsync -az --delete \
|
|
||||||
-e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
if [ "${{ github.event.inputs.include_docs }}" = "true" ] && [ -n "${{ steps.staging.outputs['docs-path'] }}" ]; then
|
||||||
"${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs['docs-path'] }}/" \
|
echo "📥 Copying documentation bundle from staging"
|
||||||
"$staging_root/docs/"
|
rsync -az --delete \
|
||||||
else
|
-e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
||||||
echo "ℹ️ Documentation promotion skipped"
|
"${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs['docs-path'] }}/" \
|
||||||
fi
|
"$staging_root/docs/"
|
||||||
|
else
|
||||||
echo "service-dir=$staging_root/service" >> $GITHUB_OUTPUT
|
echo "ℹ️ Documentation promotion skipped"
|
||||||
echo "docs-dir=$staging_root/docs" >> $GITHUB_OUTPUT
|
fi
|
||||||
|
|
||||||
- name: Backup production service content
|
echo "service-dir=$staging_root/service" >> $GITHUB_OUTPUT
|
||||||
run: |
|
echo "docs-dir=$staging_root/docs" >> $GITHUB_OUTPUT
|
||||||
ssh -o StrictHostKeyChecking=no -i "${{ steps.production.outputs['key-file'] }}" \
|
|
||||||
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}" \
|
- name: Backup production service content
|
||||||
"set -e; TARGET='${{ steps.production.outputs.path }}'; \
|
run: |
|
||||||
if [ -d \"$TARGET\" ]; then \
|
ssh -o StrictHostKeyChecking=no -i "${{ steps.production.outputs['key-file'] }}" \
|
||||||
parent=\$(dirname \"$TARGET\"); \
|
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}" \
|
||||||
base=\$(basename \"$TARGET\"); \
|
"set -e; TARGET='${{ steps.production.outputs.path }}'; \
|
||||||
backup=\"\$parent/\${base}.backup.\$(date +%Y%m%d_%H%M%S)\"; \
|
if [ -d \"$TARGET\" ]; then \
|
||||||
mkdir -p \"\$backup\"; \
|
parent=\$(dirname \"$TARGET\"); \
|
||||||
rsync -a --delete \"$TARGET/\" \"\$backup/\"; \
|
base=\$(basename \"$TARGET\"); \
|
||||||
ls -dt \"\$parent/\${base}.backup.*\" 2>/dev/null | tail -n +6 | xargs rm -rf || true; \
|
backup=\"\$parent/\${base}.backup.\$(date +%Y%m%d_%H%M%S)\"; \
|
||||||
echo 'Backup created at ' \"\$backup\"; \
|
mkdir -p \"\$backup\"; \
|
||||||
else \
|
rsync -a --delete \"$TARGET/\" \"\$backup/\"; \
|
||||||
echo 'Production service path missing; skipping backup'; \
|
ls -dt \"\$parent/\${base}.backup.*\" 2>/dev/null | tail -n +6 | xargs rm -rf || true; \
|
||||||
fi"
|
echo 'Backup created at ' \"\$backup\"; \
|
||||||
|
else \
|
||||||
- name: Publish service to production
|
echo 'Production service path missing; skipping backup'; \
|
||||||
run: |
|
fi"
|
||||||
rsync -az --delete \
|
|
||||||
-e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
- name: Publish service to production
|
||||||
"${{ steps.fetch.outputs['service-dir'] }}/" \
|
run: |
|
||||||
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs.path }}/"
|
rsync -az --delete \
|
||||||
|
-e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
||||||
- name: Promote documentation bundle
|
"${{ steps.fetch.outputs['service-dir'] }}/" \
|
||||||
if: github.event.inputs.include_docs == 'true' && steps.production.outputs['docs-path'] != ''
|
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs.path }}/"
|
||||||
run: |
|
|
||||||
rsync -az --delete \
|
- name: Promote documentation bundle
|
||||||
-e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
if: github.event.inputs.include_docs == 'true' && steps.production.outputs['docs-path'] != ''
|
||||||
"${{ steps.fetch.outputs['docs-dir'] }}/" \
|
run: |
|
||||||
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs['docs-path'] }}/"
|
rsync -az --delete \
|
||||||
|
-e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
||||||
- name: Promotion summary
|
"${{ steps.fetch.outputs['docs-dir'] }}/" \
|
||||||
run: |
|
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs['docs-path'] }}/"
|
||||||
echo "✅ Promotion completed"
|
|
||||||
echo " Tag: ${{ github.event.inputs.tag }}"
|
- name: Promotion summary
|
||||||
echo " Service: ${{ steps.staging.outputs.host }} → ${{ steps.production.outputs.host }}"
|
run: |
|
||||||
if [ "${{ github.event.inputs.include_docs }}" = "true" ]; then
|
echo "✅ Promotion completed"
|
||||||
echo " Docs: included"
|
echo " Tag: ${{ github.event.inputs.tag }}"
|
||||||
else
|
echo " Service: ${{ steps.staging.outputs.host }} → ${{ steps.production.outputs.host }}"
|
||||||
echo " Docs: skipped"
|
if [ "${{ github.event.inputs.include_docs }}" = "true" ]; then
|
||||||
fi
|
echo " Docs: included"
|
||||||
|
else
|
||||||
|
echo " Docs: skipped"
|
||||||
|
fi
|
||||||
|
|||||||
24
.gitea/workflows/provenance-check.yml
Normal file
24
.gitea/workflows/provenance-check.yml
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
name: provenance-check
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Emit provenance summary
|
||||||
|
run: |
|
||||||
|
mkdir -p out/provenance
|
||||||
|
echo "run_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ")" > out/provenance/summary.txt
|
||||||
|
|
||||||
|
- name: Upload provenance summary
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: provenance-summary
|
||||||
|
path: out/provenance/**
|
||||||
306
.gitea/workflows/reachability-bench.yaml
Normal file
306
.gitea/workflows/reachability-bench.yaml
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
name: Reachability Benchmark
|
||||||
|
|
||||||
|
# Sprint: SPRINT_3500_0003_0001
|
||||||
|
# Task: CORPUS-009 - Create Gitea workflow for reachability benchmark
|
||||||
|
# Task: CORPUS-010 - Configure nightly + per-PR benchmark runs
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
baseline_version:
|
||||||
|
description: 'Baseline version to compare against'
|
||||||
|
required: false
|
||||||
|
default: 'latest'
|
||||||
|
verbose:
|
||||||
|
description: 'Enable verbose output'
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'datasets/reachability/**'
|
||||||
|
- 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/**'
|
||||||
|
- 'bench/reachability-benchmark/**'
|
||||||
|
- '.gitea/workflows/reachability-bench.yaml'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'datasets/reachability/**'
|
||||||
|
- 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/**'
|
||||||
|
- 'bench/reachability-benchmark/**'
|
||||||
|
schedule:
|
||||||
|
# Nightly at 02:00 UTC
|
||||||
|
- cron: '0 2 * * *'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
benchmark:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
|
||||||
|
TZ: UTC
|
||||||
|
STELLAOPS_OFFLINE: 'true'
|
||||||
|
STELLAOPS_DETERMINISTIC: 'true'
|
||||||
|
outputs:
|
||||||
|
precision: ${{ steps.metrics.outputs.precision }}
|
||||||
|
recall: ${{ steps.metrics.outputs.recall }}
|
||||||
|
f1: ${{ steps.metrics.outputs.f1 }}
|
||||||
|
pr_auc: ${{ steps.metrics.outputs.pr_auc }}
|
||||||
|
regression: ${{ steps.compare.outputs.regression }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET 10
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Cache NuGet packages
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: ~/.nuget/packages
|
||||||
|
key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-nuget-
|
||||||
|
|
||||||
|
- name: Restore benchmark project
|
||||||
|
run: |
|
||||||
|
dotnet restore src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
|
||||||
|
--configfile nuget.config
|
||||||
|
|
||||||
|
- name: Build benchmark project
|
||||||
|
run: |
|
||||||
|
dotnet build src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
|
||||||
|
-c Release \
|
||||||
|
--no-restore
|
||||||
|
|
||||||
|
- name: Validate corpus integrity
|
||||||
|
run: |
|
||||||
|
echo "::group::Validating corpus index"
|
||||||
|
if [ ! -f datasets/reachability/corpus.json ]; then
|
||||||
|
echo "::error::corpus.json not found"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
python3 -c "import json; data = json.load(open('datasets/reachability/corpus.json')); print(f'Corpus contains {len(data.get(\"samples\", []))} samples')"
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Run benchmark
|
||||||
|
id: benchmark
|
||||||
|
run: |
|
||||||
|
echo "::group::Running reachability benchmark"
|
||||||
|
mkdir -p bench/results
|
||||||
|
|
||||||
|
# Run the corpus benchmark
|
||||||
|
dotnet run \
|
||||||
|
--project src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
|
||||||
|
-c Release \
|
||||||
|
--no-build \
|
||||||
|
-- corpus run \
|
||||||
|
--corpus datasets/reachability/corpus.json \
|
||||||
|
--output bench/results/benchmark-${{ github.sha }}.json \
|
||||||
|
--format json \
|
||||||
|
${{ inputs.verbose == 'true' && '--verbose' || '' }}
|
||||||
|
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Extract metrics
|
||||||
|
id: metrics
|
||||||
|
run: |
|
||||||
|
echo "::group::Extracting metrics"
|
||||||
|
RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"
|
||||||
|
|
||||||
|
if [ -f "$RESULT_FILE" ]; then
|
||||||
|
PRECISION=$(jq -r '.metrics.precision // 0' "$RESULT_FILE")
|
||||||
|
RECALL=$(jq -r '.metrics.recall // 0' "$RESULT_FILE")
|
||||||
|
F1=$(jq -r '.metrics.f1 // 0' "$RESULT_FILE")
|
||||||
|
PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$RESULT_FILE")
|
||||||
|
|
||||||
|
echo "precision=$PRECISION" >> $GITHUB_OUTPUT
|
||||||
|
echo "recall=$RECALL" >> $GITHUB_OUTPUT
|
||||||
|
echo "f1=$F1" >> $GITHUB_OUTPUT
|
||||||
|
echo "pr_auc=$PR_AUC" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
echo "Precision: $PRECISION"
|
||||||
|
echo "Recall: $RECALL"
|
||||||
|
echo "F1: $F1"
|
||||||
|
echo "PR-AUC: $PR_AUC"
|
||||||
|
else
|
||||||
|
echo "::error::Benchmark result file not found"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Get baseline
|
||||||
|
id: baseline
|
||||||
|
run: |
|
||||||
|
echo "::group::Loading baseline"
|
||||||
|
BASELINE_VERSION="${{ inputs.baseline_version || 'latest' }}"
|
||||||
|
|
||||||
|
if [ "$BASELINE_VERSION" = "latest" ]; then
|
||||||
|
BASELINE_FILE=$(ls -t bench/baselines/*.json 2>/dev/null | head -1)
|
||||||
|
else
|
||||||
|
BASELINE_FILE="bench/baselines/$BASELINE_VERSION.json"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -f "$BASELINE_FILE" ]; then
|
||||||
|
echo "baseline_file=$BASELINE_FILE" >> $GITHUB_OUTPUT
|
||||||
|
echo "Using baseline: $BASELINE_FILE"
|
||||||
|
else
|
||||||
|
echo "::warning::No baseline found, skipping comparison"
|
||||||
|
echo "baseline_file=" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Compare to baseline
|
||||||
|
id: compare
|
||||||
|
if: steps.baseline.outputs.baseline_file != ''
|
||||||
|
run: |
|
||||||
|
echo "::group::Comparing to baseline"
|
||||||
|
BASELINE_FILE="${{ steps.baseline.outputs.baseline_file }}"
|
||||||
|
RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"
|
||||||
|
|
||||||
|
# Extract baseline metrics
|
||||||
|
BASELINE_PRECISION=$(jq -r '.metrics.precision // 0' "$BASELINE_FILE")
|
||||||
|
BASELINE_RECALL=$(jq -r '.metrics.recall // 0' "$BASELINE_FILE")
|
||||||
|
BASELINE_PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$BASELINE_FILE")
|
||||||
|
|
||||||
|
# Extract current metrics
|
||||||
|
CURRENT_PRECISION=$(jq -r '.metrics.precision // 0' "$RESULT_FILE")
|
||||||
|
CURRENT_RECALL=$(jq -r '.metrics.recall // 0' "$RESULT_FILE")
|
||||||
|
CURRENT_PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$RESULT_FILE")
|
||||||
|
|
||||||
|
# Calculate deltas
|
||||||
|
PRECISION_DELTA=$(echo "$CURRENT_PRECISION - $BASELINE_PRECISION" | bc -l)
|
||||||
|
RECALL_DELTA=$(echo "$CURRENT_RECALL - $BASELINE_RECALL" | bc -l)
|
||||||
|
PR_AUC_DELTA=$(echo "$CURRENT_PR_AUC - $BASELINE_PR_AUC" | bc -l)
|
||||||
|
|
||||||
|
echo "Precision delta: $PRECISION_DELTA"
|
||||||
|
echo "Recall delta: $RECALL_DELTA"
|
||||||
|
echo "PR-AUC delta: $PR_AUC_DELTA"
|
||||||
|
|
||||||
|
# Check for regression (PR-AUC drop > 2%)
|
||||||
|
REGRESSION_THRESHOLD=-0.02
|
||||||
|
if (( $(echo "$PR_AUC_DELTA < $REGRESSION_THRESHOLD" | bc -l) )); then
|
||||||
|
echo "::error::PR-AUC regression detected: $PR_AUC_DELTA (threshold: $REGRESSION_THRESHOLD)"
|
||||||
|
echo "regression=true" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "regression=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Generate markdown report
|
||||||
|
run: |
|
||||||
|
echo "::group::Generating report"
|
||||||
|
RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"
|
||||||
|
REPORT_FILE="bench/results/benchmark-${{ github.sha }}.md"
|
||||||
|
|
||||||
|
cat > "$REPORT_FILE" << 'EOF'
|
||||||
|
# Reachability Benchmark Report
|
||||||
|
|
||||||
|
**Commit:** ${{ github.sha }}
|
||||||
|
**Run:** ${{ github.run_number }}
|
||||||
|
**Date:** $(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
||||||
|
|
||||||
|
## Metrics
|
||||||
|
|
||||||
|
| Metric | Value |
|
||||||
|
|--------|-------|
|
||||||
|
| Precision | ${{ steps.metrics.outputs.precision }} |
|
||||||
|
| Recall | ${{ steps.metrics.outputs.recall }} |
|
||||||
|
| F1 Score | ${{ steps.metrics.outputs.f1 }} |
|
||||||
|
| PR-AUC | ${{ steps.metrics.outputs.pr_auc }} |
|
||||||
|
|
||||||
|
## Comparison
|
||||||
|
|
||||||
|
${{ steps.compare.outputs.regression == 'true' && '⚠️ **REGRESSION DETECTED**' || '✅ No regression' }}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "Report generated: $REPORT_FILE"
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Upload results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: benchmark-results-${{ github.sha }}
|
||||||
|
path: |
|
||||||
|
bench/results/benchmark-${{ github.sha }}.json
|
||||||
|
bench/results/benchmark-${{ github.sha }}.md
|
||||||
|
retention-days: 90
|
||||||
|
|
||||||
|
- name: Fail on regression
|
||||||
|
if: steps.compare.outputs.regression == 'true' && github.event_name == 'pull_request'
|
||||||
|
run: |
|
||||||
|
echo "::error::Benchmark regression detected. PR-AUC dropped below threshold."
|
||||||
|
exit 1
|
||||||
|
|
||||||
|
update-baseline:
|
||||||
|
needs: benchmark
|
||||||
|
if: github.event_name == 'push' && github.ref == 'refs/heads/main' && needs.benchmark.outputs.regression != 'true'
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download results
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: benchmark-results-${{ github.sha }}
|
||||||
|
path: bench/results/
|
||||||
|
|
||||||
|
- name: Update baseline (nightly only)
|
||||||
|
if: github.event_name == 'schedule'
|
||||||
|
run: |
|
||||||
|
DATE=$(date +%Y%m%d)
|
||||||
|
cp bench/results/benchmark-${{ github.sha }}.json bench/baselines/baseline-$DATE.json
|
||||||
|
echo "Updated baseline to baseline-$DATE.json"
|
||||||
|
|
||||||
|
notify-pr:
|
||||||
|
needs: benchmark
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- name: Comment on PR
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const precision = '${{ needs.benchmark.outputs.precision }}';
|
||||||
|
const recall = '${{ needs.benchmark.outputs.recall }}';
|
||||||
|
const f1 = '${{ needs.benchmark.outputs.f1 }}';
|
||||||
|
const prAuc = '${{ needs.benchmark.outputs.pr_auc }}';
|
||||||
|
const regression = '${{ needs.benchmark.outputs.regression }}' === 'true';
|
||||||
|
|
||||||
|
const status = regression ? '⚠️ REGRESSION' : '✅ PASS';
|
||||||
|
|
||||||
|
const body = `## Reachability Benchmark Results ${status}
|
||||||
|
|
||||||
|
| Metric | Value |
|
||||||
|
|--------|-------|
|
||||||
|
| Precision | ${precision} |
|
||||||
|
| Recall | ${recall} |
|
||||||
|
| F1 Score | ${f1} |
|
||||||
|
| PR-AUC | ${prAuc} |
|
||||||
|
|
||||||
|
${regression ? '### ⚠️ Regression Detected\nPR-AUC dropped below threshold. Please review changes.' : ''}
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Details</summary>
|
||||||
|
|
||||||
|
- Commit: \`${{ github.sha }}\`
|
||||||
|
- Run: [#${{ github.run_number }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||||
|
|
||||||
|
</details>`;
|
||||||
|
|
||||||
|
github.rest.issues.createComment({
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body: body
|
||||||
|
});
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user