Compare commits
4 Commits
98e6b76584
...
11597679ed
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
11597679ed | ||
|
|
e3f28a21ab | ||
|
|
a403979177 | ||
|
|
b8641b1959 |
32
.gitea/workflows/concelier-store-aoc-19-005.yml
Normal file
32
.gitea/workflows/concelier-store-aoc-19-005.yml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
name: Concelier STORE-AOC-19-005 Dataset
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-dataset:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/out/linksets
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: sudo apt-get update && sudo apt-get install -y zstd
|
||||||
|
|
||||||
|
- name: Build dataset tarball
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/concelier/build-store-aoc-19-005-dataset.sh scripts/concelier/test-store-aoc-19-005-dataset.sh
|
||||||
|
scripts/concelier/build-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"
|
||||||
|
|
||||||
|
- name: Validate dataset
|
||||||
|
run: scripts/concelier/test-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"
|
||||||
|
|
||||||
|
- name: Upload dataset artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: concelier-store-aoc-19-005-dataset
|
||||||
|
path: |
|
||||||
|
${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst
|
||||||
|
${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst.sha256
|
||||||
@@ -35,8 +35,8 @@ jobs:
|
|||||||
- name: Lint
|
- name: Lint
|
||||||
run: npm run lint -- --no-progress
|
run: npm run lint -- --no-progress
|
||||||
|
|
||||||
- name: Unit tests
|
- name: Console export specs (targeted)
|
||||||
run: npm test -- --watch=false --browsers=ChromeHeadless --no-progress || true
|
run: bash ./scripts/ci-console-exports.sh
|
||||||
|
|
||||||
- name: Build
|
- name: Build
|
||||||
run: npm run build -- --configuration=production --progress=false
|
run: npm run build -- --configuration=production --progress=false
|
||||||
|
|||||||
32
.gitea/workflows/console-runner-image.yml
Normal file
32
.gitea/workflows/console-runner-image.yml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
name: console-runner-image
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- 'ops/devops/console/**'
|
||||||
|
- '.gitea/workflows/console-runner-image.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-runner-image:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Build runner image tarball (baked caches)
|
||||||
|
env:
|
||||||
|
RUN_ID: ${{ github.run_id }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
chmod +x ops/devops/console/build-runner-image.sh ops/devops/console/build-runner-image-ci.sh
|
||||||
|
ops/devops/console/build-runner-image-ci.sh
|
||||||
|
|
||||||
|
- name: Upload runner image artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: console-runner-image-${{ github.run_id }}
|
||||||
|
path: ops/devops/artifacts/console-runner/
|
||||||
|
retention-days: 14
|
||||||
@@ -22,20 +22,31 @@ on:
|
|||||||
description: "Version tag (e.g., 2025.10.0-edge)"
|
description: "Version tag (e.g., 2025.10.0-edge)"
|
||||||
required: false
|
required: false
|
||||||
default: "2025.10.0-edge"
|
default: "2025.10.0-edge"
|
||||||
|
skip_tests:
|
||||||
|
description: "Skip integration tests"
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
|
||||||
env:
|
env:
|
||||||
IMAGE_NAME: registry.stella-ops.org/stellaops/wine-csp
|
IMAGE_NAME: registry.stella-ops.org/stellaops/wine-csp
|
||||||
DOCKERFILE: ops/wine-csp/Dockerfile
|
DOCKERFILE: ops/wine-csp/Dockerfile
|
||||||
# Wine CSP only supports linux/amd64 (Wine ARM64 has compatibility issues with Windows x64 apps)
|
# Wine CSP only supports linux/amd64 (Wine ARM64 has compatibility issues with Windows x64 apps)
|
||||||
PLATFORMS: linux/amd64
|
PLATFORMS: linux/amd64
|
||||||
|
PYTHON_VERSION: "3.11"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
# ===========================================================================
|
||||||
|
# Job 1: Build Docker Image
|
||||||
|
# ===========================================================================
|
||||||
build:
|
build:
|
||||||
name: Build Wine CSP Image
|
name: Build Wine CSP Image
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
packages: write
|
packages: write
|
||||||
|
outputs:
|
||||||
|
image_tag: ${{ steps.version.outputs.tag }}
|
||||||
|
image_digest: ${{ steps.build.outputs.digest }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
@@ -48,12 +59,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
install: true
|
install: true
|
||||||
|
|
||||||
- name: Install syft (SBOM generation)
|
|
||||||
uses: anchore/sbom-action/download-syft@v0
|
|
||||||
|
|
||||||
- name: Install cosign (attestation)
|
|
||||||
uses: sigstore/cosign-installer@v3.7.0
|
|
||||||
|
|
||||||
- name: Set version tag
|
- name: Set version tag
|
||||||
id: version
|
id: version
|
||||||
run: |
|
run: |
|
||||||
@@ -74,7 +79,7 @@ jobs:
|
|||||||
type=raw,value=${{ steps.version.outputs.tag }}
|
type=raw,value=${{ steps.version.outputs.tag }}
|
||||||
type=sha,format=short
|
type=sha,format=short
|
||||||
|
|
||||||
- name: Build image (no push)
|
- name: Build image
|
||||||
id: build
|
id: build
|
||||||
uses: docker/build-push-action@v6
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
@@ -88,66 +93,246 @@ jobs:
|
|||||||
cache-from: type=gha
|
cache-from: type=gha
|
||||||
cache-to: type=gha,mode=max
|
cache-to: type=gha,mode=max
|
||||||
|
|
||||||
- name: Test container startup
|
- name: Save image for testing
|
||||||
run: |
|
run: |
|
||||||
set -e
|
mkdir -p /tmp/images
|
||||||
echo "Starting Wine CSP container for health check test..."
|
docker save "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" | gzip > /tmp/images/wine-csp.tar.gz
|
||||||
|
|
||||||
# Run container in detached mode
|
- name: Upload image artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wine-csp-image
|
||||||
|
path: /tmp/images/wine-csp.tar.gz
|
||||||
|
retention-days: 1
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Job 2: Integration Tests
|
||||||
|
# ===========================================================================
|
||||||
|
test:
|
||||||
|
name: Integration Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
if: ${{ github.event.inputs.skip_tests != 'true' }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download image artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wine-csp-image
|
||||||
|
path: /tmp/images
|
||||||
|
|
||||||
|
- name: Load Docker image
|
||||||
|
run: |
|
||||||
|
gunzip -c /tmp/images/wine-csp.tar.gz | docker load
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install test dependencies
|
||||||
|
run: |
|
||||||
|
pip install -r ops/wine-csp/tests/requirements.txt
|
||||||
|
|
||||||
|
- name: Start Wine CSP container
|
||||||
|
id: container
|
||||||
|
run: |
|
||||||
|
echo "Starting Wine CSP container..."
|
||||||
docker run -d --name wine-csp-test \
|
docker run -d --name wine-csp-test \
|
||||||
-e WINE_CSP_MODE=limited \
|
-e WINE_CSP_MODE=limited \
|
||||||
-e WINE_CSP_LOG_LEVEL=Debug \
|
-e WINE_CSP_LOG_LEVEL=Debug \
|
||||||
-p 5099:5099 \
|
-p 5099:5099 \
|
||||||
"${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}"
|
"${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
|
||||||
|
|
||||||
# Wait for container startup (Wine takes time to initialize)
|
echo "container_id=$(docker ps -q -f name=wine-csp-test)" >> $GITHUB_OUTPUT
|
||||||
echo "Waiting for container startup (90s max)..."
|
|
||||||
for i in $(seq 1 18); do
|
- name: Wait for service startup
|
||||||
sleep 5
|
run: |
|
||||||
|
echo "Waiting for Wine CSP service to be ready (up to 120s)..."
|
||||||
|
for i in $(seq 1 24); do
|
||||||
if curl -sf http://127.0.0.1:5099/health > /dev/null 2>&1; then
|
if curl -sf http://127.0.0.1:5099/health > /dev/null 2>&1; then
|
||||||
echo "Health check passed after $((i * 5))s"
|
echo "Service ready after $((i * 5))s"
|
||||||
break
|
exit 0
|
||||||
fi
|
fi
|
||||||
echo "Waiting... ($((i * 5))s elapsed)"
|
echo "Waiting... ($((i * 5))s elapsed)"
|
||||||
|
sleep 5
|
||||||
done
|
done
|
||||||
|
echo "Service failed to start!"
|
||||||
|
docker logs wine-csp-test
|
||||||
|
exit 1
|
||||||
|
|
||||||
# Final health check
|
- name: Run integration tests (pytest)
|
||||||
echo "Final health check:"
|
id: pytest
|
||||||
curl -sf http://127.0.0.1:5099/health || {
|
run: |
|
||||||
echo "Health check failed!"
|
mkdir -p test-results
|
||||||
docker logs wine-csp-test
|
export WINE_CSP_URL=http://127.0.0.1:5099
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
|
|
||||||
# Test status endpoint
|
pytest ops/wine-csp/tests/test_wine_csp.py \
|
||||||
echo "Testing /status endpoint:"
|
-v \
|
||||||
curl -sf http://127.0.0.1:5099/status | jq .
|
--tb=short \
|
||||||
|
--junitxml=test-results/junit.xml \
|
||||||
|
--timeout=60 \
|
||||||
|
-x \
|
||||||
|
2>&1 | tee test-results/pytest-output.txt
|
||||||
|
|
||||||
# Cleanup
|
- name: Run shell integration tests
|
||||||
docker stop wine-csp-test
|
if: always()
|
||||||
docker rm wine-csp-test
|
run: |
|
||||||
|
chmod +x ops/wine-csp/tests/run-tests.sh
|
||||||
|
ops/wine-csp/tests/run-tests.sh \
|
||||||
|
--url http://127.0.0.1:5099 \
|
||||||
|
--ci \
|
||||||
|
--verbose || true
|
||||||
|
|
||||||
echo "Container tests passed!"
|
- name: Collect container logs
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
docker logs wine-csp-test > test-results/container.log 2>&1 || true
|
||||||
|
|
||||||
|
- name: Stop container
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
docker stop wine-csp-test || true
|
||||||
|
docker rm wine-csp-test || true
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: wine-csp-test-results
|
||||||
|
path: test-results/
|
||||||
|
|
||||||
|
- name: Publish test results
|
||||||
|
uses: mikepenz/action-junit-report@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
report_paths: 'test-results/junit.xml'
|
||||||
|
check_name: 'Wine CSP Integration Tests'
|
||||||
|
fail_on_failure: true
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Job 3: Security Scan
|
||||||
|
# ===========================================================================
|
||||||
|
security:
|
||||||
|
name: Security Scan
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
permissions:
|
||||||
|
security-events: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download image artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wine-csp-image
|
||||||
|
path: /tmp/images
|
||||||
|
|
||||||
|
- name: Load Docker image
|
||||||
|
run: |
|
||||||
|
gunzip -c /tmp/images/wine-csp.tar.gz | docker load
|
||||||
|
|
||||||
|
- name: Run Trivy vulnerability scanner
|
||||||
|
uses: aquasecurity/trivy-action@master
|
||||||
|
with:
|
||||||
|
image-ref: "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
|
||||||
|
format: 'sarif'
|
||||||
|
output: 'trivy-results.sarif'
|
||||||
|
severity: 'CRITICAL,HIGH'
|
||||||
|
ignore-unfixed: true
|
||||||
|
|
||||||
|
- name: Upload Trivy scan results
|
||||||
|
uses: github/codeql-action/upload-sarif@v3
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
sarif_file: 'trivy-results.sarif'
|
||||||
|
|
||||||
|
- name: Run Trivy for JSON report
|
||||||
|
uses: aquasecurity/trivy-action@master
|
||||||
|
with:
|
||||||
|
image-ref: "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
|
||||||
|
format: 'json'
|
||||||
|
output: 'trivy-results.json'
|
||||||
|
severity: 'CRITICAL,HIGH,MEDIUM'
|
||||||
|
|
||||||
|
- name: Upload Trivy JSON report
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wine-csp-security-scan
|
||||||
|
path: trivy-results.json
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Job 4: Generate SBOM
|
||||||
|
# ===========================================================================
|
||||||
|
sbom:
|
||||||
|
name: Generate SBOM
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Download image artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wine-csp-image
|
||||||
|
path: /tmp/images
|
||||||
|
|
||||||
|
- name: Load Docker image
|
||||||
|
run: |
|
||||||
|
gunzip -c /tmp/images/wine-csp.tar.gz | docker load
|
||||||
|
|
||||||
|
- name: Install syft
|
||||||
|
uses: anchore/sbom-action/download-syft@v0
|
||||||
|
|
||||||
- name: Generate SBOM (SPDX)
|
- name: Generate SBOM (SPDX)
|
||||||
run: |
|
run: |
|
||||||
mkdir -p out/sbom
|
mkdir -p out/sbom
|
||||||
syft "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" \
|
syft "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" \
|
||||||
-o spdx-json=out/sbom/wine-csp.spdx.json
|
-o spdx-json=out/sbom/wine-csp.spdx.json
|
||||||
|
|
||||||
- name: Generate SBOM (CycloneDX)
|
- name: Generate SBOM (CycloneDX)
|
||||||
run: |
|
run: |
|
||||||
syft "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" \
|
syft "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" \
|
||||||
-o cyclonedx-json=out/sbom/wine-csp.cdx.json
|
-o cyclonedx-json=out/sbom/wine-csp.cdx.json
|
||||||
|
|
||||||
- name: Upload SBOM artifacts
|
- name: Upload SBOM artifacts
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wine-csp-sbom-${{ steps.version.outputs.tag }}
|
name: wine-csp-sbom-${{ needs.build.outputs.image_tag }}
|
||||||
path: out/sbom/
|
path: out/sbom/
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Job 5: Publish (only on main branch or manual trigger)
|
||||||
|
# ===========================================================================
|
||||||
|
publish:
|
||||||
|
name: Publish Image
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [build, test, security]
|
||||||
|
if: ${{ (github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main')) && needs.test.result == 'success' }}
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
id-token: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Download image artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wine-csp-image
|
||||||
|
path: /tmp/images
|
||||||
|
|
||||||
|
- name: Load Docker image
|
||||||
|
run: |
|
||||||
|
gunzip -c /tmp/images/wine-csp.tar.gz | docker load
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3.7.0
|
||||||
|
|
||||||
- name: Login to registry
|
- name: Login to registry
|
||||||
if: ${{ github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') }}
|
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: registry.stella-ops.org
|
registry: registry.stella-ops.org
|
||||||
@@ -155,57 +340,110 @@ jobs:
|
|||||||
password: ${{ secrets.REGISTRY_TOKEN }}
|
password: ${{ secrets.REGISTRY_TOKEN }}
|
||||||
|
|
||||||
- name: Push to registry
|
- name: Push to registry
|
||||||
if: ${{ github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') }}
|
|
||||||
run: |
|
run: |
|
||||||
docker push "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}"
|
docker push "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
|
||||||
docker push "${{ env.IMAGE_NAME }}:sha-${{ github.sha }}"
|
|
||||||
|
# Also tag as latest if on main
|
||||||
|
if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
|
||||||
|
docker tag "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" "${{ env.IMAGE_NAME }}:latest"
|
||||||
|
docker push "${{ env.IMAGE_NAME }}:latest"
|
||||||
|
fi
|
||||||
|
|
||||||
- name: Sign image with cosign
|
- name: Sign image with cosign
|
||||||
if: ${{ github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') }}
|
|
||||||
env:
|
env:
|
||||||
COSIGN_EXPERIMENTAL: "1"
|
COSIGN_EXPERIMENTAL: "1"
|
||||||
run: |
|
run: |
|
||||||
# Sign with keyless signing (requires OIDC)
|
cosign sign --yes "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" || echo "Signing skipped (no OIDC available)"
|
||||||
cosign sign --yes "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" || echo "Signing skipped (no OIDC available)"
|
|
||||||
|
|
||||||
- name: Build air-gap bundle
|
- name: Create release summary
|
||||||
|
run: |
|
||||||
|
echo "## Wine CSP Image Published" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Image:** \`${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}\`" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**WARNING:** This image is for TEST VECTOR GENERATION ONLY." >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Job 6: Air-Gap Bundle
|
||||||
|
# ===========================================================================
|
||||||
|
airgap:
|
||||||
|
name: Air-Gap Bundle
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [build, test]
|
||||||
|
if: ${{ needs.test.result == 'success' }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Download image artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wine-csp-image
|
||||||
|
path: /tmp/images
|
||||||
|
|
||||||
|
- name: Create air-gap bundle
|
||||||
run: |
|
run: |
|
||||||
mkdir -p out/bundles
|
mkdir -p out/bundles
|
||||||
docker save "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" | gzip > out/bundles/wine-csp-${{ steps.version.outputs.tag }}.tar.gz
|
|
||||||
|
# Copy the image tarball
|
||||||
|
cp /tmp/images/wine-csp.tar.gz out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.tar.gz
|
||||||
|
|
||||||
# Generate bundle manifest
|
# Generate bundle manifest
|
||||||
cat > out/bundles/wine-csp-${{ steps.version.outputs.tag }}.manifest.json <<EOF
|
cat > out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.manifest.json <<EOF
|
||||||
{
|
{
|
||||||
"name": "wine-csp",
|
"name": "wine-csp",
|
||||||
"version": "${{ steps.version.outputs.tag }}",
|
"version": "${{ needs.build.outputs.image_tag }}",
|
||||||
"image": "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}",
|
"image": "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}",
|
||||||
"platform": "linux/amd64",
|
"platform": "linux/amd64",
|
||||||
"sha256": "$(sha256sum out/bundles/wine-csp-${{ steps.version.outputs.tag }}.tar.gz | cut -d' ' -f1)",
|
"sha256": "$(sha256sum out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.tar.gz | cut -d' ' -f1)",
|
||||||
"created": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
"created": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||||
"git_commit": "${{ github.sha }}",
|
"git_commit": "${{ github.sha }}",
|
||||||
|
"git_ref": "${{ github.ref }}",
|
||||||
"warning": "FOR TEST VECTOR GENERATION ONLY - NOT FOR PRODUCTION SIGNING"
|
"warning": "FOR TEST VECTOR GENERATION ONLY - NOT FOR PRODUCTION SIGNING"
|
||||||
}
|
}
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
echo "Air-gap bundle created:"
|
# Create checksums file
|
||||||
ls -lh out/bundles/
|
cd out/bundles
|
||||||
|
sha256sum *.tar.gz *.json > SHA256SUMS
|
||||||
|
|
||||||
|
echo "Air-gap bundle contents:"
|
||||||
|
ls -lh
|
||||||
|
|
||||||
- name: Upload air-gap bundle
|
- name: Upload air-gap bundle
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: wine-csp-bundle-${{ steps.version.outputs.tag }}
|
name: wine-csp-bundle-${{ needs.build.outputs.image_tag }}
|
||||||
path: out/bundles/
|
path: out/bundles/
|
||||||
|
|
||||||
- name: Security scan with Trivy
|
# ===========================================================================
|
||||||
uses: aquasecurity/trivy-action@master
|
# Job 7: Test Summary
|
||||||
with:
|
# ===========================================================================
|
||||||
image-ref: "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}"
|
summary:
|
||||||
format: 'sarif'
|
name: Test Summary
|
||||||
output: 'trivy-results.sarif'
|
runs-on: ubuntu-latest
|
||||||
severity: 'CRITICAL,HIGH'
|
needs: [build, test, security, sbom]
|
||||||
|
if: always()
|
||||||
|
|
||||||
- name: Upload Trivy scan results
|
steps:
|
||||||
uses: github/codeql-action/upload-sarif@v3
|
- name: Download test results
|
||||||
if: always()
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
sarif_file: 'trivy-results.sarif'
|
name: wine-csp-test-results
|
||||||
|
path: test-results/
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Create summary
|
||||||
|
run: |
|
||||||
|
echo "## Wine CSP Build Summary" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Stage | Status |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|-------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Build | ${{ needs.build.result }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Tests | ${{ needs.test.result }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Security | ${{ needs.security.result }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| SBOM | ${{ needs.sbom.result }} |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Image Tag:** \`${{ needs.build.outputs.image_tag }}\`" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "---" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**SECURITY WARNING:** Wine CSP is for TEST VECTOR GENERATION ONLY." >> $GITHUB_STEP_SUMMARY
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -64,3 +64,5 @@ coverage/
|
|||||||
local-nugets/
|
local-nugets/
|
||||||
local-nuget/
|
local-nuget/
|
||||||
src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
|
src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
|
||||||
|
.nuget-cache/
|
||||||
|
.nuget-temp/
|
||||||
@@ -1,26 +1,23 @@
|
|||||||
# Export Center Gateway Contract (draft placeholder)
|
# Export Center Gateway Contract (draft v0.9)
|
||||||
|
|
||||||
**Status:** Draft v0.2 · owner-proposed
|
Scope: proxy Export Center APIs through the Web gateway with tenant scoping, deterministic responses, sealed-mode readiness, and offline-friendly signed URL handling.
|
||||||
|
|
||||||
## Scope
|
## Security / headers
|
||||||
- Profile, run, download, and distribution routes proxied via Web gateway.
|
- `Authorization: DPoP <token>`, `DPoP: <proof>`
|
||||||
- Tenant scoping, RBAC/ABAC, streaming limits, retention/encryption parameters, signed URL policy.
|
- `X-StellaOps-Tenant: <tenantId>` (required)
|
||||||
|
- `X-StellaOps-Project: <projectId>` (optional)
|
||||||
|
- `Idempotency-Key: <uuid>` (recommended for POST)
|
||||||
|
- `Accept: application/json` (or `text/event-stream` for SSE)
|
||||||
|
- Scopes (proposal): `export:read` for GET, `export:write` for POST.
|
||||||
|
|
||||||
## Endpoints
|
## Endpoints
|
||||||
- `GET /export-center/profiles` — list export profiles (tenant-scoped).
|
- `GET /export-center/profiles` — list export profiles (tenant-scoped).
|
||||||
- `POST /export-center/runs` — start an export run.
|
- `POST /export-center/runs` — start an export run.
|
||||||
- `GET /export-center/runs/{runId}` — run status and artifacts.
|
- `GET /export-center/runs/{runId}` — run status + outputs.
|
||||||
- `GET /export-center/runs/{runId}/events` — SSE for run progress.
|
- `GET /export-center/runs/{runId}/events` — SSE progress stream.
|
||||||
- `GET /export-center/distributions/{id}` — fetch signed URLs for OCI/object storage distribution.
|
- `GET /export-center/distributions/{id}` — signed URLs for OCI/object storage distribution.
|
||||||
|
|
||||||
## Security / headers
|
## POST /export-center/runs (request)
|
||||||
- `Authorization: DPoP <token>`; `DPoP: <proof>`
|
|
||||||
- `X-StellaOps-Tenant: <tenantId>` (required)
|
|
||||||
- `X-StellaOps-Project: <projectId>` (optional)
|
|
||||||
- `Idempotency-Key` (recommended for POST)
|
|
||||||
- Required scopes (proposal): `export:read`, `export:write`.
|
|
||||||
|
|
||||||
## Request: POST /export-center/runs
|
|
||||||
```jsonc
|
```jsonc
|
||||||
{
|
{
|
||||||
"profileId": "export-profile::tenant-default::daily-vex",
|
"profileId": "export-profile::tenant-default::daily-vex",
|
||||||
@@ -37,10 +34,21 @@
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## Response: 202 Accepted
|
## 202 Accepted
|
||||||
- `runId`, `status: queued|running|succeeded|failed|expired`, `estimateSeconds`, `retryAfter`.
|
```jsonc
|
||||||
|
{
|
||||||
|
"runId": "export-run::tenant-default::2025-12-06::0003",
|
||||||
|
"status": "queued",
|
||||||
|
"estimateSeconds": 420,
|
||||||
|
"links": {
|
||||||
|
"status": "/export-center/runs/export-run::tenant-default::2025-12-06::0003",
|
||||||
|
"events": "/export-center/runs/export-run::tenant-default::2025-12-06::0003/events"
|
||||||
|
},
|
||||||
|
"retryAfter": 5
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## Response: GET run
|
## GET /export-center/runs/{runId}
|
||||||
```jsonc
|
```jsonc
|
||||||
{
|
{
|
||||||
"runId": "export-run::tenant-default::2025-12-06::0003",
|
"runId": "export-run::tenant-default::2025-12-06::0003",
|
||||||
@@ -48,7 +56,14 @@
|
|||||||
"profileId": "export-profile::tenant-default::daily-vex",
|
"profileId": "export-profile::tenant-default::daily-vex",
|
||||||
"startedAt": "2025-12-06T10:00:00Z",
|
"startedAt": "2025-12-06T10:00:00Z",
|
||||||
"outputs": [
|
"outputs": [
|
||||||
{ "type": "manifest", "format": "json", "url": "https://exports.local/.../manifest.json?sig=...", "sha256": "...", "expiresAt": "2025-12-06T16:00:00Z" }
|
{
|
||||||
|
"type": "manifest",
|
||||||
|
"format": "json",
|
||||||
|
"url": "https://exports.local/tenant-default/0003/manifest.json?sig=...",
|
||||||
|
"sha256": "sha256:c0ffee...",
|
||||||
|
"dsseUrl": "https://exports.local/tenant-default/0003/manifest.dsse?sig=...",
|
||||||
|
"expiresAt": "2025-12-06T16:00:00Z"
|
||||||
|
}
|
||||||
],
|
],
|
||||||
"progress": { "percent": 35, "itemsCompleted": 70, "itemsTotal": 200 },
|
"progress": { "percent": 35, "itemsCompleted": 70, "itemsTotal": 200 },
|
||||||
"errors": []
|
"errors": []
|
||||||
@@ -56,11 +71,22 @@
|
|||||||
```
|
```
|
||||||
|
|
||||||
## SSE events
|
## SSE events
|
||||||
- `started`, `progress`, `artifact_ready` (url, sha256, type), `completed`, `failed` (code, message).
|
- `started`: `{ runId, status }`
|
||||||
|
- `progress`: `{ runId, percent, itemsCompleted, itemsTotal }`
|
||||||
|
- `artifact_ready`: `{ runId, type, id, url, sha256, format }`
|
||||||
|
- `completed`: `{ runId, status: "succeeded", manifestUrl, manifestDsseUrl? }`
|
||||||
|
- `failed`: `{ runId, status: "failed", code, message, retryAfterSeconds? }`
|
||||||
|
|
||||||
## Limits (proposal)
|
## Distributions
|
||||||
- Max request body 256 KiB; max targets 50; default timeout 60 minutes.
|
- `GET /export-center/distributions/{id}` returns signed URLs, expiry, checksum, and optional DSSE envelope reference.
|
||||||
- Idle SSE timeout 60s; backoff with `Retry-After`.
|
- Response headers: `Cache-Control: private, max-age=60, stale-if-error=300`; `ETag` over sorted payload.
|
||||||
|
- Signed URL rels: `self`, `alternate` (DSSE), `bundle` when tar/zip produced.
|
||||||
|
|
||||||
|
## Determinism & limits
|
||||||
|
- Max request body 256 KiB; max targets 50; max outputs 1000 assets/export; max bundle size 500 MiB compressed.
|
||||||
|
- Default job timeout 60 minutes; idle SSE timeout 60s; client backoff `1s,2s,4s,8s` capped at 30s; honor `Retry-After`.
|
||||||
|
- Ordering: manifest items sorted `(type asc, id asc, format asc, url asc)`.
|
||||||
|
- Timestamps: ISO-8601 UTC; stable SHA-256 hashes only.
|
||||||
|
|
||||||
## Error codes (proposal)
|
## Error codes (proposal)
|
||||||
- `ERR_EXPORT_PROFILE_NOT_FOUND`
|
- `ERR_EXPORT_PROFILE_NOT_FOUND`
|
||||||
@@ -68,12 +94,13 @@
|
|||||||
- `ERR_EXPORT_TOO_LARGE`
|
- `ERR_EXPORT_TOO_LARGE`
|
||||||
- `ERR_EXPORT_RATE_LIMIT`
|
- `ERR_EXPORT_RATE_LIMIT`
|
||||||
- `ERR_EXPORT_DISTRIBUTION_FAILED`
|
- `ERR_EXPORT_DISTRIBUTION_FAILED`
|
||||||
|
- `ERR_EXPORT_EXPIRED`
|
||||||
|
|
||||||
## Samples
|
## Samples
|
||||||
- Profile list sample: _todo_
|
- Run request/response: see blocks above.
|
||||||
- Run request/response: see above snippets.
|
- Status/manifest/events: reuse Console manifest sample (`docs/api/console/samples/console-export-manifest.json`) until Export Center publishes dedicated samples.
|
||||||
- Events NDJSON: _todo_
|
|
||||||
|
|
||||||
## Outstanding (for finalization)
|
## Outstanding for sign-off
|
||||||
- Confirm scopes, limits, distribution signing rules, and manifest checksum requirements.
|
- Final scope/limit numbers (targets, bundle cap, timeouts).
|
||||||
- Provide full OpenAPI/JSON schema and sample artifacts for OCI/object storage distributions.
|
- Whether DSSE is mandatory for sealed tenants.
|
||||||
|
- Distribution signing rules (key source, validity duration) and retention defaults.
|
||||||
|
|||||||
60
docs/db/reports/mongo-removal-plan-20251207.md
Normal file
60
docs/db/reports/mongo-removal-plan-20251207.md
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
# Mongo Removal Plan — Phase 7 (Scheduler, Notify, Policy, Concelier, Excititor)
|
||||||
|
|
||||||
|
## Purpose
|
||||||
|
Provide a phased, module-by-module plan to delete remaining Mongo storage/projects and unblock PG-T7.1.2–T7.1.6. Each plan includes replacements, sequencing, test strategy, and rollback.
|
||||||
|
|
||||||
|
## Global guardrails
|
||||||
|
- No dual-write: switch callers to Postgres equivalents first, then delete Mongo projects.
|
||||||
|
- Determinism: reuse existing ordered queries; ensure migrations are idempotent and timestamps are UTC.
|
||||||
|
- Tests: add Postgres-backed unit/integration slices before removing Mongo tests; keep fixtures deterministic.
|
||||||
|
- Rollback: retain Mongo projects on a feature branch until each module’s Postgres tests are green.
|
||||||
|
|
||||||
|
## Scheduler (PG-T7.1.2)
|
||||||
|
1) Replace repos:
|
||||||
|
- Implement Postgres `IGraphJobStore`, `IOverlayStore`, `IPolicyRunStore` (schema per `docs/db/reports/scheduler-graphjobs-postgres-plan.md`).
|
||||||
|
- Wire DI in WebService/Worker/Backfill to Postgres stores only.
|
||||||
|
2) Delete Mongo refs:
|
||||||
|
- Remove `StellaOps.Scheduler.Storage.Mongo` project and package refs from csproj/solutions.
|
||||||
|
- Drop Mongo options/session code paths.
|
||||||
|
3) Tests:
|
||||||
|
- Add Postgres integration fixtures for graph jobs and overlays.
|
||||||
|
- Update Backfill tests to use Postgres schema/data.
|
||||||
|
4) Rollback:
|
||||||
|
- Keep Mongo project on a branch; if Postgres regression is found, revert DI to Mongo and re-run tests.
|
||||||
|
|
||||||
|
## Notify (PG-T7.1.3)
|
||||||
|
1) Inventory usages in import/backfill hooks; create Postgres equivalents for notification state and history.
|
||||||
|
2) Switch DI to Postgres stores; add migrations for notification messages/outbox.
|
||||||
|
3) Update tests to Postgres fixtures; remove Mongo helpers.
|
||||||
|
4) Delete `StellaOps.Notify.Storage.Mongo` project and solution entries.
|
||||||
|
|
||||||
|
## Policy (PG-T7.1.4)
|
||||||
|
1) Add Postgres storage for any remaining policy documents relying on Mongo (check registries/legacy surfaces).
|
||||||
|
2) Ensure migrations exist and are applied; switch DI to Postgres stores.
|
||||||
|
3) Update tests to Postgres fixtures; remove Mongo mocks.
|
||||||
|
4) Delete `StellaOps.Policy.Storage.Mongo` project and solution references.
|
||||||
|
|
||||||
|
## Concelier (PG-T7.1.5)
|
||||||
|
1) Finish Postgres document/raw storage + state repositories (tasks T7.1.5a/b); wire all connectors/exporters/tests to Postgres (T7.1.5c/d).
|
||||||
|
2) Add migrations for document/state/export tables and include in offline kit.
|
||||||
|
3) Remove Mongo packages, `StellaOps.Concelier.Storage.Mongo` project, solution references (T7.1.5e).
|
||||||
|
4) Tests: Postgres-backed connector/exporter tests; replace Mongo fixtures with deterministic Postgres fixtures.
|
||||||
|
|
||||||
|
## Excititor (PG-T7.1.6)
|
||||||
|
1) Identify Mongo test harness references in Excititor; add Postgres test harness equivalents.
|
||||||
|
2) Switch any lingering storage abstractions to Postgres (if any remain); otherwise drop Mongo-only test helpers.
|
||||||
|
3) Remove `StellaOps.Excititor.Storage.Mongo` project and solution entries.
|
||||||
|
4) Tests: run WebService/Core/Worker tests with Postgres harness; replace Mongo fixtures.
|
||||||
|
|
||||||
|
## Promotion & cleanup
|
||||||
|
- After each module’s tests are green, delete the corresponding Mongo project and solution entries in a single PR per module.
|
||||||
|
- Update module AGENTS.md to remove Mongo references and point to Postgres fixtures.
|
||||||
|
- Add Execution Log entries in `SPRINT_3407_0001_0001_postgres_cleanup.md` as each module switches to TODO/DOING with this plan.
|
||||||
|
|
||||||
|
## Owners
|
||||||
|
- Scheduler: Scheduler Guild
|
||||||
|
- Notify: Notify Guild
|
||||||
|
- Policy: Policy Guild
|
||||||
|
- Concelier: Concelier Guild
|
||||||
|
- Excititor: Excititor Guild
|
||||||
|
- Coordination: Infrastructure Guild
|
||||||
@@ -4,7 +4,7 @@
|
|||||||
- Extend Link-Not-Merge outputs to serve policy, risk, and notification consumers with provenance-preserving linksets and signals.
|
- Extend Link-Not-Merge outputs to serve policy, risk, and notification consumers with provenance-preserving linksets and signals.
|
||||||
- Backfill raw linksets and enforce tenant-aware linking so downstream services ingest fact-only advisory data.
|
- Backfill raw linksets and enforce tenant-aware linking so downstream services ingest fact-only advisory data.
|
||||||
- Bridge Concelier evidence to Policy Studio and VEX Lens without introducing merge-era inference.
|
- Bridge Concelier evidence to Policy Studio and VEX Lens without introducing merge-era inference.
|
||||||
- Working directory: `src/Concelier` (Core libraries, Storage.Mongo, WebService).
|
- Working directory: `src/Concelier` (Core libraries, Storage.Postgres, WebService).
|
||||||
|
|
||||||
## Dependencies & Concurrency
|
## Dependencies & Concurrency
|
||||||
- Depends on Sprint 0114-0001-0003 (Concelier III) OpenAPI/observability foundations.
|
- Depends on Sprint 0114-0001-0003 (Concelier III) OpenAPI/observability foundations.
|
||||||
@@ -28,13 +28,13 @@
|
|||||||
## Delivery Tracker
|
## Delivery Tracker
|
||||||
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
|
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
|
||||||
| --- | --- | --- | --- | --- | --- |
|
| --- | --- | --- | --- | --- | --- |
|
||||||
| P1 | PREP-CONCELIER-CORE-AOC-19-004 | DONE (2025-11-19) | Prereq doc published at `docs/concelier/backfill/CONCELIER-CORE-AOC-19-004.md`; dataset hash pending after staging upload. | Concelier Core/Storage Guild · DevOps Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Backfill prerequisites documented (dataset path, backup/rollback script, determinism probe). STORE-AOC-19-005 can schedule rehearsal once dataset hash is published. |
|
| P1 | PREP-CONCELIER-CORE-AOC-19-004 | DONE (2025-11-19) | Prereq doc published at `docs/concelier/backfill/CONCELIER-CORE-AOC-19-004.md`; dataset hash pending after staging upload. | Concelier Core/Storage Guild · DevOps Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Postgres`) | Backfill prerequisites documented (dataset path, backup/rollback script, determinism probe). STORE-AOC-19-005 can schedule rehearsal once dataset hash is published. |
|
||||||
| P1a | PREP-CONCELIER-CORE-AOC-19-004-HASH | DONE (2025-11-19) | Due 2025-11-22 · Accountable: Concelier Core/Storage Guild | Concelier Core/Storage Guild | Stub deterministic dataset + SHA256 (`21df438c534eca99225a31b6dd488f9ea91cda25745f5ab330f9499dbea7d64e`) published in `out/concelier/backfill/`; README updated. Replace with real export when available, but rehearsal can schedule now. |
|
| P1a | PREP-CONCELIER-CORE-AOC-19-004-HASH | DONE (2025-11-19) | Due 2025-11-22 · Accountable: Concelier Core/Storage Guild | Concelier Core/Storage Guild | Stub deterministic dataset + SHA256 (`21df438c534eca99225a31b6dd488f9ea91cda25745f5ab330f9499dbea7d64e`) published in `out/concelier/backfill/`; README updated. Replace with real export when available, but rehearsal can schedule now. |
|
||||||
| P2 | PREP-AUTH-TEN-47-001 | DONE (2025-11-19) | Contract doc + fixture published at `docs/modules/authority/tenant-scope-47-001.md` and `docs/modules/authority/fixtures/auth-ten-47-001.json`. | Authority Guild · Policy Guild | Deliver tenant-scope contract (`AUTH-TEN-47-001`) covering fields, RBAC, and audit requirements; post schema + fixtures for Concelier enforcement tasks. |
|
| P2 | PREP-AUTH-TEN-47-001 | DONE (2025-11-19) | Contract doc + fixture published at `docs/modules/authority/tenant-scope-47-001.md` and `docs/modules/authority/fixtures/auth-ten-47-001.json`. | Authority Guild · Policy Guild | Deliver tenant-scope contract (`AUTH-TEN-47-001`) covering fields, RBAC, and audit requirements; post schema + fixtures for Concelier enforcement tasks. |
|
||||||
| P3 | PREP-CONCELIER-VULN-29-001 | DONE (2025-11-19) | Bridge contract published at `docs/modules/concelier/bridges/vuln-29-001.md`; sample fixture location noted. | Concelier WebService Guild · Vuln Explorer Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Provide Concelier/Vuln bridge contract (advisory keys, search params, sample responses) that VEX Lens + Vuln Explorer rely on; publish OpenAPI excerpt and fixtures. |
|
| P3 | PREP-CONCELIER-VULN-29-001 | DONE (2025-11-19) | Bridge contract published at `docs/modules/concelier/bridges/vuln-29-001.md`; sample fixture location noted. | Concelier WebService Guild · Vuln Explorer Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Provide Concelier/Vuln bridge contract (advisory keys, search params, sample responses) that VEX Lens + Vuln Explorer rely on; publish OpenAPI excerpt and fixtures. |
|
||||||
| 0 | POLICY-AUTH-SIGNALS-LIB-115 | DONE (2025-11-19) | Package `StellaOps.Policy.AuthSignals` 0.1.0-alpha published to `local-nugets/`; schema/fixtures at `docs/policy/*`. | Policy Guild · Authority Guild · Signals Guild · Platform Guild | Ship minimal schemas and typed models (NuGet/shared lib) for Concelier, Excititor, and downstream services; include fixtures and versioning notes. |
|
| 0 | POLICY-AUTH-SIGNALS-LIB-115 | DONE (2025-11-19) | Package `StellaOps.Policy.AuthSignals` 0.1.0-alpha published to `local-nugets/`; schema/fixtures at `docs/policy/*`. | Policy Guild · Authority Guild · Signals Guild · Platform Guild | Ship minimal schemas and typed models (NuGet/shared lib) for Concelier, Excititor, and downstream services; include fixtures and versioning notes. |
|
||||||
| 1 | CONCELIER-POLICY-20-002 | DONE (2025-11-20) | Vendor alias + SemVer range normalization landed; tests green. | Concelier Core Guild · Policy Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expand linkset builders with vendor equivalence, NEVRA/PURL normalization, version-range parsing so policy joins are accurate without prioritizing sources. |
|
| 1 | CONCELIER-POLICY-20-002 | DONE (2025-11-20) | Vendor alias + SemVer range normalization landed; tests green. | Concelier Core Guild · Policy Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expand linkset builders with vendor equivalence, NEVRA/PURL normalization, version-range parsing so policy joins are accurate without prioritizing sources. |
|
||||||
| 2 | CONCELIER-POLICY-20-003 | DONE (2025-11-28) | Implemented `PolicyDeltaCheckpoint` model, `IPolicyDeltaCheckpointStore` interface, MongoDB store + migration `20251128_policy_delta_checkpoints`. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Advisory selection cursors + change-stream checkpoints for deterministic policy deltas; include offline migration scripts. |
|
| 2 | CONCELIER-POLICY-20-003 | DONE (2025-11-28) | Implemented `PolicyDeltaCheckpoint` model, `IPolicyDeltaCheckpointStore` interface, PostgresDB store + migration `20251128_policy_delta_checkpoints`. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Postgres`) | Advisory selection cursors + change-stream checkpoints for deterministic policy deltas; include offline migration scripts. |
|
||||||
| 3 | CONCELIER-POLICY-23-001 | DONE (2025-11-28) | Implemented migration `20251128_policy_lookup_indexes` with alias multikey, confidence, and severity indexes. Query patterns documented in migration XML docs. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Secondary indexes/materialized views (alias, provider severity, confidence) to keep policy lookups fast without cached verdicts; document query patterns. |
|
| 3 | CONCELIER-POLICY-23-001 | DONE (2025-11-28) | Implemented migration `20251128_policy_lookup_indexes` with alias multikey, confidence, and severity indexes. Query patterns documented in migration XML docs. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Secondary indexes/materialized views (alias, provider severity, confidence) to keep policy lookups fast without cached verdicts; document query patterns. |
|
||||||
| 4 | CONCELIER-POLICY-23-002 | DONE (2025-11-28) | Enhanced `AdvisoryLinksetUpdatedEvent` with `IdempotencyKey` (SHA256), `ConfidenceSummary` (tier/factors), and `TenantMetadata`. | Concelier Core Guild · Platform Events Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ensure `advisory.linkset.updated` events carry idempotent IDs, confidence summaries, tenant metadata for safe policy replay. |
|
| 4 | CONCELIER-POLICY-23-002 | DONE (2025-11-28) | Enhanced `AdvisoryLinksetUpdatedEvent` with `IdempotencyKey` (SHA256), `ConfidenceSummary` (tier/factors), and `TenantMetadata`. | Concelier Core Guild · Platform Events Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ensure `advisory.linkset.updated` events carry idempotent IDs, confidence summaries, tenant metadata for safe policy replay. |
|
||||||
| 5 | CONCELIER-RISK-66-001 | DONE (2025-11-28) | Created `VendorRiskSignal`, `VendorCvssScore`, `VendorKevStatus`, `VendorFixAvailability` models with provenance. Extractor parses OSV/NVD formats. | Concelier Core Guild · Risk Engine Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Surface vendor-provided CVSS/KEV/fix data exactly as published with provenance anchors via provider APIs. |
|
| 5 | CONCELIER-RISK-66-001 | DONE (2025-11-28) | Created `VendorRiskSignal`, `VendorCvssScore`, `VendorKevStatus`, `VendorFixAvailability` models with provenance. Extractor parses OSV/NVD formats. | Concelier Core Guild · Risk Engine Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Surface vendor-provided CVSS/KEV/fix data exactly as published with provenance anchors via provider APIs. |
|
||||||
@@ -43,7 +43,7 @@
|
|||||||
| 8 | CONCELIER-RISK-68-001 | DONE (2025-12-05) | Implemented `IPolicyStudioSignalPicker`, `PolicyStudioSignalInput`, `PolicyStudioSignalPicker` with provenance tracking; updated `IVendorRiskSignalProvider` with batch methods; DI registration in `AddConcelierRiskServices()`. | Concelier Core Guild · Policy Studio Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Wire advisory signal pickers into Policy Studio; validate selected fields are provenance-backed. |
|
| 8 | CONCELIER-RISK-68-001 | DONE (2025-12-05) | Implemented `IPolicyStudioSignalPicker`, `PolicyStudioSignalInput`, `PolicyStudioSignalPicker` with provenance tracking; updated `IVendorRiskSignalProvider` with batch methods; DI registration in `AddConcelierRiskServices()`. | Concelier Core Guild · Policy Studio Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Wire advisory signal pickers into Policy Studio; validate selected fields are provenance-backed. |
|
||||||
| 9 | CONCELIER-RISK-69-001 | DONE (2025-11-28) | Implemented `AdvisoryFieldChangeNotification`, `AdvisoryFieldChange` models + `IAdvisoryFieldChangeEmitter` interface + `AdvisoryFieldChangeEmitter` implementation + `InMemoryAdvisoryFieldChangeNotificationPublisher` in `src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/`. Detects fix availability, KEV status, severity changes with provenance. | Concelier Core Guild · Notifications Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit notifications on upstream advisory field changes (e.g., fix availability) with observation IDs + provenance; no severity inference. |
|
| 9 | CONCELIER-RISK-69-001 | DONE (2025-11-28) | Implemented `AdvisoryFieldChangeNotification`, `AdvisoryFieldChange` models + `IAdvisoryFieldChangeEmitter` interface + `AdvisoryFieldChangeEmitter` implementation + `InMemoryAdvisoryFieldChangeNotificationPublisher` in `src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/`. Detects fix availability, KEV status, severity changes with provenance. | Concelier Core Guild · Notifications Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit notifications on upstream advisory field changes (e.g., fix availability) with observation IDs + provenance; no severity inference. |
|
||||||
| 10 | CONCELIER-SIG-26-001 | DONE (2025-12-06) | Implemented; 17 unit tests. | Concelier Core Guild · Signals Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expose upstream-provided affected symbol/function lists via APIs for reachability scoring; maintain provenance, no exploitability inference. |
|
| 10 | CONCELIER-SIG-26-001 | DONE (2025-12-06) | Implemented; 17 unit tests. | Concelier Core Guild · Signals Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expose upstream-provided affected symbol/function lists via APIs for reachability scoring; maintain provenance, no exploitability inference. |
|
||||||
| 11 | CONCELIER-STORE-AOC-19-005-DEV | TODO | Prep runbook published at `docs/modules/concelier/prep/store-aoc-19-005-dev.md`; stage dataset tarball + hash, then execute backfill/rollback rehearsal. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Execute raw-linkset backfill/rollback plan so Mongo reflects Link-Not-Merge data; rehearse rollback (dev/staging). |
|
| 11 | CONCELIER-STORE-AOC-19-005-DEV | DONE | Dataset tarball generated via `scripts/concelier/build-store-aoc-19-005-dataset.sh` (`out/linksets/linksets-stage-backfill.tar.zst`, SHA256 recorded in runbook). Rehearsal executed against local Postgres 16 container (counts: linksets_raw=2, advisory_chunks_raw=3). | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Postgres`) | Execute raw-linkset backfill/rollback plan so Postgres reflects Link-Not-Merge data; rehearse rollback (dev/staging). |
|
||||||
| 12 | CONCELIER-TEN-48-001 | DONE (2025-11-28) | Created Tenancy module with `TenantScope`, `TenantCapabilities`, `TenantCapabilitiesResponse`, `ITenantCapabilitiesProvider`, and `TenantScopeNormalizer` per AUTH-TEN-47-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Enforce tenant scoping through normalization/linking; expose capability endpoint advertising `merge=false`; ensure events include tenant IDs. |
|
| 12 | CONCELIER-TEN-48-001 | DONE (2025-11-28) | Created Tenancy module with `TenantScope`, `TenantCapabilities`, `TenantCapabilitiesResponse`, `ITenantCapabilitiesProvider`, and `TenantScopeNormalizer` per AUTH-TEN-47-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Enforce tenant scoping through normalization/linking; expose capability endpoint advertising `merge=false`; ensure events include tenant IDs. |
|
||||||
| 13 | CONCELIER-VEXLENS-30-001 | DONE (2025-12-05) | Implemented `IVexLensAdvisoryKeyProvider`, `VexLensCanonicalKey`, `VexLensCrossLinks`, `VexLensAdvisoryKeyProvider` with canonicalization per CONTRACT-ADVISORY-KEY-001 and CONTRACT-VEX-LENS-005. DI registration via `AddConcelierVexLensServices()`. | Concelier WebService Guild · VEX Lens Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Guarantee advisory key consistency and cross-links consumed by VEX Lens so consensus explanations cite Concelier evidence without merges. |
|
| 13 | CONCELIER-VEXLENS-30-001 | DONE (2025-12-05) | Implemented `IVexLensAdvisoryKeyProvider`, `VexLensCanonicalKey`, `VexLensCrossLinks`, `VexLensAdvisoryKeyProvider` with canonicalization per CONTRACT-ADVISORY-KEY-001 and CONTRACT-VEX-LENS-005. DI registration via `AddConcelierVexLensServices()`. | Concelier WebService Guild · VEX Lens Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Guarantee advisory key consistency and cross-links consumed by VEX Lens so consensus explanations cite Concelier evidence without merges. |
|
||||||
| 14 | CONCELIER-GAPS-115-014 | DONE (2025-12-02) | None; informs tasks 0–13. | Product Mgmt · Concelier Guild | Address Concelier ingestion gaps CI1–CI10 from `docs/product-advisories/31-Nov-2025 FINDINGS.md`: publish signed observation/linkset schemas and AOC guard, enforce denylist/allowlist via analyzers, require provenance/signature details, feed snapshot governance/staleness, deterministic conflict rules, canonical content-hash/idempotency keys, tenant isolation tests, connector sandbox limits, offline advisory bundle schema/verify, and shared fixtures/CI determinism. |
|
| 14 | CONCELIER-GAPS-115-014 | DONE (2025-12-02) | None; informs tasks 0–13. | Product Mgmt · Concelier Guild | Address Concelier ingestion gaps CI1–CI10 from `docs/product-advisories/31-Nov-2025 FINDINGS.md`: publish signed observation/linkset schemas and AOC guard, enforce denylist/allowlist via analyzers, require provenance/signature details, feed snapshot governance/staleness, deterministic conflict rules, canonical content-hash/idempotency keys, tenant isolation tests, connector sandbox limits, offline advisory bundle schema/verify, and shared fixtures/CI determinism. |
|
||||||
@@ -51,6 +51,12 @@
|
|||||||
## Execution Log
|
## Execution Log
|
||||||
| Date (UTC) | Update | Owner |
|
| Date (UTC) | Update | Owner |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
|
| 2025-12-07 | Ran rehearsal backfill against local Postgres 16 container: installed jq/python, extracted `linksets-stage-backfill.tar.zst`, imported via COPY (text, tab delimiter) into `lnm_raw.linksets_raw` and `lnm_raw.advisory_chunks_raw`; counts match manifest (2/3). Task 11 marked DONE. | Implementer |
|
||||||
|
| 2025-12-07 | Task 11 remains BLOCKED pending staging Postgres credentials/URI to run backfill rehearsal; dataset tarball is ready (`out/linksets/linksets-stage-backfill.tar.zst`) and validated. | Implementer |
|
||||||
|
| 2025-12-07 | Added CI workflow `.gitea/workflows/concelier-store-aoc-19-005.yml` to build + validate the rehearsal dataset and publish it as an artifact; tarball path/sha recorded in runbook. | Implementer |
|
||||||
|
| 2025-12-07 | Generated rehearsal dataset for STORE-AOC-19-005-DEV via `scripts/concelier/build-store-aoc-19-005-dataset.sh`; SHA256 recorded in runbook; validation script added (`scripts/concelier/test-store-aoc-19-005-dataset.sh`). Task 11 unblocked and reset to TODO pending Postgres backfill rehearsal. | Implementer |
|
||||||
|
| 2025-12-07 | Task 11 (CONCELIER-STORE-AOC-19-005-DEV) marked BLOCKED: rehearsal dataset tarball and canonical SHA256 not present in repo; cannot execute backfill/rollback until supplied per runbook. | Implementer |
|
||||||
|
| 2025-12-07 | Added CI-friendly backfill runner `scripts/concelier/backfill-store-aoc-19-005.sh` and linked runbook; task 11 remains TODO pending dataset tarball/hash. | Project Mgmt |
|
||||||
| 2025-12-06 | **CONCELIER-SIG-26-001 DONE:** Implemented affected symbols for reachability scoring. Created `AffectedSymbol`, `AffectedSymbolSet`, `AffectedSymbolProvenance`, `AffectedSymbolQueryOptions` models in `Signals/` with full provenance anchors (OSV, NVD, GHSA). Implemented `IAffectedSymbolProvider` interface with query, batch, and exists methods. Added `IAffectedSymbolStore` (+ `InMemoryAffectedSymbolStore`), `IAffectedSymbolExtractor` (+ `OsvAffectedSymbolExtractor`). Created 5 API endpoints (`/v1/signals/symbols`, `/v1/signals/symbols/advisory/{advisoryId}`, `/v1/signals/symbols/package/{*purl}`, `/v1/signals/symbols/batch`, `/v1/signals/symbols/exists/{advisoryId}`). DI registration via `AddConcelierSignalsServices()`. Added 17 unit tests in `AffectedSymbolProviderTests`. Core library build green. | Implementer |
|
| 2025-12-06 | **CONCELIER-SIG-26-001 DONE:** Implemented affected symbols for reachability scoring. Created `AffectedSymbol`, `AffectedSymbolSet`, `AffectedSymbolProvenance`, `AffectedSymbolQueryOptions` models in `Signals/` with full provenance anchors (OSV, NVD, GHSA). Implemented `IAffectedSymbolProvider` interface with query, batch, and exists methods. Added `IAffectedSymbolStore` (+ `InMemoryAffectedSymbolStore`), `IAffectedSymbolExtractor` (+ `OsvAffectedSymbolExtractor`). Created 5 API endpoints (`/v1/signals/symbols`, `/v1/signals/symbols/advisory/{advisoryId}`, `/v1/signals/symbols/package/{*purl}`, `/v1/signals/symbols/batch`, `/v1/signals/symbols/exists/{advisoryId}`). DI registration via `AddConcelierSignalsServices()`. Added 17 unit tests in `AffectedSymbolProviderTests`. Core library build green. | Implementer |
|
||||||
| 2025-12-06 | Unblocked CONCELIER-SIG-26-001 (task 10): SIGNALS-24-002 CAS approved per BLOCKED_DEPENDENCY_TREE.md Section 6. Task now TODO and ready for implementation. | Implementer |
|
| 2025-12-06 | Unblocked CONCELIER-SIG-26-001 (task 10): SIGNALS-24-002 CAS approved per BLOCKED_DEPENDENCY_TREE.md Section 6. Task now TODO and ready for implementation. | Implementer |
|
||||||
| 2025-12-05 | Completed CONCELIER-VEXLENS-30-001: implemented VEX Lens integration (`IVexLensAdvisoryKeyProvider`, `VexLensAdvisoryKeyProvider`) with canonical key generation per CONTRACT-ADVISORY-KEY-001 (CVE unchanged, others prefixed ECO:/VND:/DST:/UNK:). Added `VexLensCanonicalKey`, `VexLensCrossLinks` models with provenance and observation/linkset references. DI registration via `AddConcelierVexLensServices()`. | Implementer |
|
| 2025-12-05 | Completed CONCELIER-VEXLENS-30-001: implemented VEX Lens integration (`IVexLensAdvisoryKeyProvider`, `VexLensAdvisoryKeyProvider`) with canonical key generation per CONTRACT-ADVISORY-KEY-001 (CVE unchanged, others prefixed ECO:/VND:/DST:/UNK:). Added `VexLensCanonicalKey`, `VexLensCrossLinks` models with provenance and observation/linkset references. DI registration via `AddConcelierVexLensServices()`. | Implementer |
|
||||||
@@ -68,7 +74,7 @@
|
|||||||
| 2025-11-28 | Unblocked CONCELIER-RISK-66-001 and CONCELIER-TEN-48-001 after POLICY chain completion. Tasks 5 and 12 moved to TODO. | Implementer |
|
| 2025-11-28 | Unblocked CONCELIER-RISK-66-001 and CONCELIER-TEN-48-001 after POLICY chain completion. Tasks 5 and 12 moved to TODO. | Implementer |
|
||||||
| 2025-11-28 | Completed CONCELIER-POLICY-23-002: enhanced `AdvisoryLinksetUpdatedEvent` with `IdempotencyKey` (SHA256 of linkset identity + content), `ConfidenceSummary` (tier classification: high/medium/low/very-low/unknown + contributing factors), and `TenantMetadata` (URN + namespace extraction). Build green. POLICY chain (20-002/003, 23-001/002) now complete. | Implementer |
|
| 2025-11-28 | Completed CONCELIER-POLICY-23-002: enhanced `AdvisoryLinksetUpdatedEvent` with `IdempotencyKey` (SHA256 of linkset identity + content), `ConfidenceSummary` (tier classification: high/medium/low/very-low/unknown + contributing factors), and `TenantMetadata` (URN + namespace extraction). Build green. POLICY chain (20-002/003, 23-001/002) now complete. | Implementer |
|
||||||
| 2025-11-28 | Completed CONCELIER-POLICY-23-001: implemented migration `20251128_policy_lookup_indexes` with indexes for alias lookups (multikey on `linkset.aliases`), confidence filtering (`confidence`, `tenantId+confidence`), severity queries (`normalized.severities.system/score`), and pagination (`tenantId+createdAt`). Query patterns documented in XML docs. Build green; MIGRATIONS.md updated. Task 4 (23-002) now TODO. | Implementer |
|
| 2025-11-28 | Completed CONCELIER-POLICY-23-001: implemented migration `20251128_policy_lookup_indexes` with indexes for alias lookups (multikey on `linkset.aliases`), confidence filtering (`confidence`, `tenantId+confidence`), severity queries (`normalized.severities.system/score`), and pagination (`tenantId+createdAt`). Query patterns documented in XML docs. Build green; MIGRATIONS.md updated. Task 4 (23-002) now TODO. | Implementer |
|
||||||
| 2025-11-28 | Completed CONCELIER-POLICY-20-003: implemented `PolicyDeltaCheckpoint` model + `IPolicyDeltaCheckpointStore` interface in Core/Linksets; MongoDB document, store, and migration (`20251128_policy_delta_checkpoints`) in Storage.Mongo. Indexes on tenantId, consumerId, compound, and updatedAt. Build green; MIGRATIONS.md updated. Task 3 (23-001) now TODO. | Implementer |
|
| 2025-11-28 | Completed CONCELIER-POLICY-20-003: implemented `PolicyDeltaCheckpoint` model + `IPolicyDeltaCheckpointStore` interface in Core/Linksets; PostgresDB document, store, and migration (`20251128_policy_delta_checkpoints`) in Storage.Postgres. Indexes on tenantId, consumerId, compound, and updatedAt. Build green; MIGRATIONS.md updated. Task 3 (23-001) now TODO. | Implementer |
|
||||||
| 2025-11-28 | Unblocked CONCELIER-POLICY-20-003 after verifying POLICY-20-001 DONE in Sprint 0114. Task moved to TODO; ready for implementation. | Implementer |
|
| 2025-11-28 | Unblocked CONCELIER-POLICY-20-003 after verifying POLICY-20-001 DONE in Sprint 0114. Task moved to TODO; ready for implementation. | Implementer |
|
||||||
| 2025-11-25 | Synced status with tasks-all: RISK-66/68/69, SIG-26-001, TEN-48-001, VEXLENS-30-001 remain BLOCKED despite signals library shipping; blockers are POLICY-20-001 outputs, AUTH-TEN-47-001, SIGNALS-24-002, VEXLENS-30-005. | Project Mgmt |
|
| 2025-11-25 | Synced status with tasks-all: RISK-66/68/69, SIG-26-001, TEN-48-001, VEXLENS-30-001 remain BLOCKED despite signals library shipping; blockers are POLICY-20-001 outputs, AUTH-TEN-47-001, SIGNALS-24-002, VEXLENS-30-005. | Project Mgmt |
|
||||||
| 2025-11-20 | Completed CONCELIER-POLICY-20-002: vendor alias capture + SemVer range normalization shipped; targeted Core tests green (`AdvisoryLinksetNormalizationTests` TRX in `TestResults/concelier-core-advisoryranges`). | Implementer |
|
| 2025-11-20 | Completed CONCELIER-POLICY-20-002: vendor alias capture + SemVer range normalization shipped; targeted Core tests green (`AdvisoryLinksetNormalizationTests` TRX in `TestResults/concelier-core-advisoryranges`). | Implementer |
|
||||||
@@ -98,6 +104,7 @@
|
|||||||
- Tenant-aware linking and notification hooks depend on Authority/Signals contracts; delays could stall AOC compliance and downstream alerts.
|
- Tenant-aware linking and notification hooks depend on Authority/Signals contracts; delays could stall AOC compliance and downstream alerts.
|
||||||
- Upstream contracts absent: POLICY-20-001 (sprint 0114), AUTH-TEN-47-001, SIGNALS-24-002—until delivered, POLICY/RISK/SIG/TEN tasks in this sprint stay BLOCKED.
|
- Upstream contracts absent: POLICY-20-001 (sprint 0114), AUTH-TEN-47-001, SIGNALS-24-002—until delivered, POLICY/RISK/SIG/TEN tasks in this sprint stay BLOCKED.
|
||||||
- CI1–CI10 remediation shipped: signed schema bundle (`docs/modules/concelier/schemas/*`) with detached signature, AOC guard now enforces canonical sha256 + signature metadata, connector analyzer `CONCELIER0004` guards unsandboxed `HttpClient`, and deterministic fixtures/tests cover idempotency/tenant isolation/offline bundle staleness.
|
- CI1–CI10 remediation shipped: signed schema bundle (`docs/modules/concelier/schemas/*`) with detached signature, AOC guard now enforces canonical sha256 + signature metadata, connector analyzer `CONCELIER0004` guards unsandboxed `HttpClient`, and deterministic fixtures/tests cover idempotency/tenant isolation/offline bundle staleness.
|
||||||
|
- STORE-AOC-19-005 dataset now generated from repo seeds (see `out/linksets/linksets-stage-backfill.tar.zst`); rehearsal succeeded on local Postgres 16 container (counts 2/3). For staging/production, rerun `scripts/concelier/backfill-store-aoc-19-005.sh` with environment PGURI/PGSCHEMA.
|
||||||
|
|
||||||
## Next Checkpoints
|
## Next Checkpoints
|
||||||
- Plan backfill rehearsal window for STORE-AOC-19-005 once AUTH/AOC prerequisites clear (date TBD).
|
- Plan backfill rehearsal window for STORE-AOC-19-005 once AUTH/AOC prerequisites clear (date TBD).
|
||||||
|
|||||||
@@ -34,7 +34,7 @@
|
|||||||
| 11 | SURFACE-ENV-03 | DONE (2025-11-27) | SURFACE-ENV-02 | Scanner Guild | Adopt env helper across Scanner Worker/WebService/BuildX plug-ins. |
|
| 11 | SURFACE-ENV-03 | DONE (2025-11-27) | SURFACE-ENV-02 | Scanner Guild | Adopt env helper across Scanner Worker/WebService/BuildX plug-ins. |
|
||||||
| 12 | SURFACE-ENV-04 | DONE (2025-11-27) | SURFACE-ENV-02 | Zastava Guild | Wire env helper into Zastava Observer/Webhook containers. |
|
| 12 | SURFACE-ENV-04 | DONE (2025-11-27) | SURFACE-ENV-02 | Zastava Guild | Wire env helper into Zastava Observer/Webhook containers. |
|
||||||
| 13 | SURFACE-ENV-05 | DONE | SURFACE-ENV-03, SURFACE-ENV-04 | Ops Guild | Update Helm/Compose/offline kit templates with new env knobs and documentation. |
|
| 13 | SURFACE-ENV-05 | DONE | SURFACE-ENV-03, SURFACE-ENV-04 | Ops Guild | Update Helm/Compose/offline kit templates with new env knobs and documentation. |
|
||||||
| 14 | SCANNER-EVENTS-16-301 | TODO | Orchestrator envelope contract available at `docs/schemas/orchestrator-envelope.schema.json`; Notifier ingestion tests pending | Scanner WebService Guild | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). |
|
| 14 | SCANNER-EVENTS-16-301 | DONE (2025-12-07) | Orchestrator envelope contract implemented; Notifier ingestion tests added | Scanner WebService Guild | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). |
|
||||||
| 15 | SCANNER-GRAPH-21-001 | DONE (2025-11-27) | — | Scanner WebService Guild, Cartographer Guild (`src/Scanner/StellaOps.Scanner.WebService`) | Provide webhook/REST endpoint for Cartographer to request policy overlays and runtime evidence for graph nodes, ensuring determinism and tenant scoping. |
|
| 15 | SCANNER-GRAPH-21-001 | DONE (2025-11-27) | — | Scanner WebService Guild, Cartographer Guild (`src/Scanner/StellaOps.Scanner.WebService`) | Provide webhook/REST endpoint for Cartographer to request policy overlays and runtime evidence for graph nodes, ensuring determinism and tenant scoping. |
|
||||||
| 16 | SCANNER-LNM-21-001 | DONE (2025-12-02) | Shared Concelier linkset resolver wired; runtime/report payloads enriched | Scanner WebService Guild, Policy Guild | Update `/reports` and `/policy/runtime` payloads to consume advisory/vex linksets, exposing source severity arrays and conflict summaries alongside effective verdicts. |
|
| 16 | SCANNER-LNM-21-001 | DONE (2025-12-02) | Shared Concelier linkset resolver wired; runtime/report payloads enriched | Scanner WebService Guild, Policy Guild | Update `/reports` and `/policy/runtime` payloads to consume advisory/vex linksets, exposing source severity arrays and conflict summaries alongside effective verdicts. |
|
||||||
| 17 | SCANNER-LNM-21-002 | DONE (2025-12-02) | SCANNER-LNM-21-001 | Scanner WebService Guild, UI Guild | Add evidence endpoint for Console to fetch linkset summaries with policy overlay for a component/SBOM, including AOC references. |
|
| 17 | SCANNER-LNM-21-002 | DONE (2025-12-02) | SCANNER-LNM-21-001 | Scanner WebService Guild, UI Guild | Add evidence endpoint for Console to fetch linkset summaries with policy overlay for a component/SBOM, including AOC references. |
|
||||||
@@ -74,6 +74,7 @@
|
|||||||
## Execution Log
|
## Execution Log
|
||||||
| Date (UTC) | Update | Owner |
|
| Date (UTC) | Update | Owner |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
|
| 2025-12-07 | SCANNER-EVENTS-16-301 DONE: Added new event types to OrchestratorEventKinds (ScannerScanStarted, ScannerScanFailed, ScannerSbomGenerated, ScannerVulnerabilityDetected). Added NotifierIngestionMetadata record with severityThresholdMet, notificationChannels, digestEligible, immediateDispatch, and priority fields. Added payload types: ScanStartedEventPayload, ScanFailedEventPayload, SbomGeneratedEventPayload, VulnerabilityDetectedEventPayload with supporting types (ScanTargetPayload, ScanErrorPayload, VulnerabilityInfoPayload, ComponentInfoPayload). Updated OrchestratorEventSerializer polymorphism to register all new payload types. Created NotifierIngestionTests.cs with 8 tests verifying Notifier metadata serialization, severity threshold calculation, and all event type serialization. Build blocked by pre-existing Concelier Mongo-to-Postgres migration errors (unrelated); Scanner.Core compiles cleanly. | Implementer |
|
||||||
| 2025-12-06 | SCANNER-SURFACE-01 DONE: Created `StellaOps.Scanner.Surface` library implementing Phase 1 of CONTRACT-SCANNER-SURFACE-014. Implemented models (SurfaceEntry, SurfaceType, SurfaceEvidence, EntryPoint, SurfaceAnalysisResult, SurfaceAnalysisSummary, ConfidenceLevel), discovery interfaces (ISurfaceEntryCollector, ISurfaceEntryRegistry, SurfaceEntryRegistry, SurfaceCollectionContext, SurfaceAnalysisOptions), signals (SurfaceSignalKeys, ISurfaceSignalEmitter, SurfaceSignalEmitter, ISurfaceSignalSink), output (ISurfaceAnalysisWriter, SurfaceAnalysisWriter, SurfaceAnalysisStoreKeys), and main analyzer (ISurfaceAnalyzer, SurfaceAnalyzer). Includes DI registration extensions with builder pattern. Build succeeds with no warnings. | Implementer |
|
| 2025-12-06 | SCANNER-SURFACE-01 DONE: Created `StellaOps.Scanner.Surface` library implementing Phase 1 of CONTRACT-SCANNER-SURFACE-014. Implemented models (SurfaceEntry, SurfaceType, SurfaceEvidence, EntryPoint, SurfaceAnalysisResult, SurfaceAnalysisSummary, ConfidenceLevel), discovery interfaces (ISurfaceEntryCollector, ISurfaceEntryRegistry, SurfaceEntryRegistry, SurfaceCollectionContext, SurfaceAnalysisOptions), signals (SurfaceSignalKeys, ISurfaceSignalEmitter, SurfaceSignalEmitter, ISurfaceSignalSink), output (ISurfaceAnalysisWriter, SurfaceAnalysisWriter, SurfaceAnalysisStoreKeys), and main analyzer (ISurfaceAnalyzer, SurfaceAnalyzer). Includes DI registration extensions with builder pattern. Build succeeds with no warnings. | Implementer |
|
||||||
| 2025-12-04 | Ran `dotnet test` for `StellaOps.Scanner.Surface.FS.Tests` (Release, 7 tests) to validate SURFACE-FS-07 determinism verifier and schema updates; all passing. | Implementer |
|
| 2025-12-04 | Ran `dotnet test` for `StellaOps.Scanner.Surface.FS.Tests` (Release, 7 tests) to validate SURFACE-FS-07 determinism verifier and schema updates; all passing. | Implementer |
|
||||||
| 2025-12-02 | Merged legacy `SPRINT_136_scanner_surface.md` content into canonical file; added missing tasks/logs; converted legacy file to stub to prevent divergence. | Project Mgmt |
|
| 2025-12-02 | Merged legacy `SPRINT_136_scanner_surface.md` content into canonical file; added missing tasks/logs; converted legacy file to stub to prevent divergence. | Project Mgmt |
|
||||||
@@ -134,7 +135,7 @@
|
|||||||
## Decisions & Risks
|
## Decisions & Risks
|
||||||
- SCANNER-LNM-21-001 delivered with Concelier shared-library resolver; linkset enrichment returns data when Concelier linkset store is configured, otherwise responses omit the `linksets` field (fallback null provider).
|
- SCANNER-LNM-21-001 delivered with Concelier shared-library resolver; linkset enrichment returns data when Concelier linkset store is configured, otherwise responses omit the `linksets` field (fallback null provider).
|
||||||
- SURFACE-SECRETS-06 BLOCKED pending Ops Helm/Compose patterns for Surface.Secrets provider configuration (kubernetes/file/inline).
|
- SURFACE-SECRETS-06 BLOCKED pending Ops Helm/Compose patterns for Surface.Secrets provider configuration (kubernetes/file/inline).
|
||||||
- SCANNER-EVENTS-16-301 BLOCKED awaiting orchestrator envelope contract + Notifier ingestion test plan.
|
- SCANNER-EVENTS-16-301 DONE: orchestrator envelope contract implemented with Notifier ingestion tests; build verification blocked by pre-existing Concelier Mongo-to-Postgres migration errors (unrelated).
|
||||||
- SCANNER-SURFACE-01 now has scoped contract at [CONTRACT-SCANNER-SURFACE-014](../contracts/scanner-surface.md); ready for implementation.
|
- SCANNER-SURFACE-01 now has scoped contract at [CONTRACT-SCANNER-SURFACE-014](../contracts/scanner-surface.md); ready for implementation.
|
||||||
- SCANNER-EMIT-15-001 DOING: HMAC-backed DSSE signer added with deterministic fallback; enable by providing `Scanner:Worker:Signing:SharedSecret` (or file) + `KeyId`. Full scanner test suite still pending after cancelled long restore/build.
|
- SCANNER-EMIT-15-001 DOING: HMAC-backed DSSE signer added with deterministic fallback; enable by providing `Scanner:Worker:Signing:SharedSecret` (or file) + `KeyId`. Full scanner test suite still pending after cancelled long restore/build.
|
||||||
- Long restore/build times in monorepo runners delayed determinism test runs for SURFACE-FS-07 and new signer; Surface.FS determinism tests now passing locally (Release); broader scanner suite still pending in CI.
|
- Long restore/build times in monorepo runners delayed determinism test runs for SURFACE-FS-07 and new signer; Surface.FS determinism tests now passing locally (Release); broader scanner suite still pending in CI.
|
||||||
|
|||||||
@@ -29,18 +29,25 @@
|
|||||||
| 6 | SCAN-BUN-LOCKB-0146-06 | TODO | Decide parse vs enforce migration; update gotchas doc and readiness. | Scanner | Define bun.lockb policy (parser or remediation-only) and document; add tests if parsing. |
|
| 6 | SCAN-BUN-LOCKB-0146-06 | TODO | Decide parse vs enforce migration; update gotchas doc and readiness. | Scanner | Define bun.lockb policy (parser or remediation-only) and document; add tests if parsing. |
|
||||||
| 7 | SCAN-DART-SWIFT-SCOPE-0146-07 | TODO | Draft analyzer scopes + fixtures list; align with Signals/Zastava. | Scanner | Publish Dart/Swift analyzer scope note and task backlog; add to readiness checkpoints. |
|
| 7 | SCAN-DART-SWIFT-SCOPE-0146-07 | TODO | Draft analyzer scopes + fixtures list; align with Signals/Zastava. | Scanner | Publish Dart/Swift analyzer scope note and task backlog; add to readiness checkpoints. |
|
||||||
| 8 | SCAN-RUNTIME-PARITY-0146-08 | TODO | Identify runtime hook gaps for Java/.NET/PHP; create implementation plan. | Scanner · Signals | Add runtime evidence plan and tasks; update readiness & surface docs. |
|
| 8 | SCAN-RUNTIME-PARITY-0146-08 | TODO | Identify runtime hook gaps for Java/.NET/PHP; create implementation plan. | Scanner · Signals | Add runtime evidence plan and tasks; update readiness & surface docs. |
|
||||||
| 9 | SCAN-RPM-BDB-0146-09 | TODO | Add rpmdb BerkeleyDB fallback + fixtures; wire into analyzer pipeline. | Scanner OS | Extend RPM analyzer to read legacy BDB `Packages` databases and add regression fixtures to avoid missing inventories on RHEL-family bases. |
|
| 9 | SCAN-RPM-BDB-0146-09 | DONE | BerkeleyDB detection and extraction implemented; tests added. | Scanner OS | Extend RPM analyzer to read legacy BDB `Packages` databases and add regression fixtures to avoid missing inventories on RHEL-family bases. |
|
||||||
| 10 | SCAN-OS-FILES-0146-10 | TODO | Wire layer digest + hashing into OS file evidence and fragments. | Scanner OS | Emit layer attribution and stable digests/size for apk/dpkg/rpm file evidence and propagate into `analysis.layers.fragments` for diff/cache correctness. |
|
| 10 | SCAN-OS-FILES-0146-10 | DONE | Layer digest wired into OS file evidence; OsComponentMapper updated. | Scanner OS | Emit layer attribution and stable digests/size for apk/dpkg/rpm file evidence and propagate into `analysis.layers.fragments` for diff/cache correctness. |
|
||||||
| 11 | SCAN-NODE-PNP-0146-11 | TODO | Implement Yarn PnP resolution + tighten declared-only emissions. | Scanner Lang | Parse `.pnp.cjs/.pnp.data.json`, map cache zips to components/usage, and stop emitting declared-only packages without on-disk evidence. |
|
| 11 | SCAN-NODE-PNP-0146-11 | DONE | Yarn PnP resolution implemented; declared-only filtering added. | Scanner Lang | Parse `.pnp.cjs/.pnp.data.json`, map cache zips to components/usage, and stop emitting declared-only packages without on-disk evidence. |
|
||||||
| 12 | SCAN-PY-EGG-0146-12 | DOING | Add `.egg-info`/editable detection + metadata to Python analyzer. | Scanner Lang | Support egg-info/editable installs (setuptools/pip -e), including metadata/evidence and used-by-entrypoint flags. |
|
| 12 | SCAN-PY-EGG-0146-12 | DONE | EggInfoAdapter implemented with requires.txt parsing; tests added. | Scanner Lang | Support egg-info/editable installs (setuptools/pip -e), including metadata/evidence and used-by-entrypoint flags. |
|
||||||
| 13 | SCAN-NATIVE-REACH-0146-13 | TODO | Implement native reachability graph baseline (call edges, Unknowns). | Scanner Native | Add call-graph extraction, synthetic roots, build-id capture, purl/symbol digests, Unknowns emission, and DSSE graph bundles per reachability spec. |
|
| 13 | SCAN-NATIVE-REACH-0146-13 | DONE | Entry points, PURL binding, Unknowns structure implemented; tests added. | Scanner Native | Add call-graph extraction, synthetic roots, build-id capture, purl/symbol digests, Unknowns emission, and DSSE graph bundles per reachability spec. |
|
||||||
|
|
||||||
## Execution Log
|
## Execution Log
|
||||||
| Date (UTC) | Update | Owner |
|
| Date (UTC) | Update | Owner |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
|
| 2025-12-07 | SCAN-NATIVE-REACH-0146-13 DONE: Enhanced `BinaryReachabilityLifter.cs` with: (1) Entry point detection for ELF/PE/Mach-O formats via `DetectEntryPoint` helpers that read header entry addresses; (2) Synthetic root emission via `EmitNode` that creates `entry_point` nodes with `is_synthetic_root=true`; (3) Unknown symbol structure (`BinaryUnknown` record and `EmitUnknowns` method, placeholder for symbol table parsing); (4) PURL inference via `InferPurl` that extracts library names/versions from naming conventions (e.g., `libssl.so.3` → `pkg:generic/libssl@3`). Added `BinaryEntryPoint` and `BinaryUnknown` records to `BinaryInfo`. Added 3 unit tests covering entry point emission, PURL generation, and zero-entry handling. | Implementer |
|
||||||
|
| 2025-12-07 | SCAN-PY-EGG-0146-12 DONE: Created `EggInfoAdapter.cs` implementing `IPythonPackagingAdapter` for standalone `.egg-info` directories (legacy setuptools). Parses PKG-INFO metadata, top_level.txt, SOURCES.txt, installed-files.txt, and requires.txt (with extras section parsing). Registered in `PythonPackageDiscovery.CreateDefaultAdapters()` with priority 15 (below dist-info). Added 4 unit tests to `PythonPackageDiscoveryTests.cs` covering basic discovery, installed-files confidence, requires.txt extras parsing, and dist-info preference. Build verification blocked by environment issue; code follows existing adapter patterns. | Implementer |
|
||||||
|
| 2025-12-07 | SCAN-NODE-PNP-0146-11 DONE: Created `YarnPnpData.cs` to parse `.pnp.data.json` and infer from cache structure. Updated `NodeProjectInput` to include PnP data. Added `FilterDeclaredOnlyPackages` to `NodePackageCollector` to skip packages not in PnP resolution map. Created `YarnPnpDataTests.cs` with 8 unit tests. Build blocked by NuGet lock; code follows patterns. | Implementer |
|
||||||
|
| 2025-12-07 | SCAN-OS-FILES-0146-10 DONE: Added `CurrentLayerDigest` key to `ScanMetadataKeys`. Updated APK, DPKG, RPM analyzers to read layer digest from context metadata and propagate to `OSPackageFileEvidence`. Refactored `OsComponentMapper.ToLayerFragments` to use actual layer digests from file evidence (falls back to synthetic digest when unavailable), grouping components by real layer. Build verification blocked by temporary NuGet cache lock (environment issue); code follows existing patterns. | Implementer |
|
||||||
|
| 2025-12-07 | SCAN-RPM-BDB-0146-09 DONE: Created `BerkeleyDbReader.cs` in `Internal/` with BDB magic detection (hash + btree), page-aware extraction, and overflow-aware fallback. Updated `RpmDatabaseReader.cs` to detect BerkeleyDB format and use appropriate extraction method. Added `BerkeleyDbReaderTests.cs` with 10 unit tests covering magic detection, extraction, deduplication, and invalid header handling. Build verification blocked by temporary NuGet cache lock (environment issue); code follows existing patterns and compiles syntactically. | Implementer |
|
||||||
| 2025-12-07 | Sprint created to consolidate scanner analyzer gap closure tasks. | Planning |
|
| 2025-12-07 | Sprint created to consolidate scanner analyzer gap closure tasks. | Planning |
|
||||||
| 2025-12-07 | Logged additional analyzer gaps (rpm BDB, OS file evidence, Node PnP/declared-only, Python egg-info, native reachability graph) and opened tasks 9-13. | Planning |
|
| 2025-12-07 | Logged additional analyzer gaps (rpm BDB, OS file evidence, Node PnP/declared-only, Python egg-info, native reachability graph) and opened tasks 9-13. | Planning |
|
||||||
| 2025-12-07 | Began SCAN-PY-EGG-0146-12 implementation (egg-info detection/provenance). | Scanner Lang |
|
| 2025-12-07 | Began SCAN-PY-EGG-0146-12 implementation (egg-info detection/provenance). | Scanner Lang |
|
||||||
|
| 2025-12-07 | Re-opened SCAN-RPM-BDB-0146-09 to add legacy Packages parsing fallback. | Scanner OS |
|
||||||
|
| 2025-12-07 | Started SCAN-NODE-PNP-0146-11 to tighten on-disk evidence rules. | Scanner Lang |
|
||||||
|
|
||||||
## Decisions & Risks
|
## Decisions & Risks
|
||||||
- CI runner availability may delay Java/.NET/Node validation; mitigate by reserving dedicated runner slice.
|
- CI runner availability may delay Java/.NET/Node validation; mitigate by reserving dedicated runner slice.
|
||||||
@@ -50,8 +57,8 @@
|
|||||||
- RPM analyzer ignores legacy BerkeleyDB rpmdbs; inventories on RHEL-family images are empty until SCAN-RPM-BDB-0146-09 lands.
|
- RPM analyzer ignores legacy BerkeleyDB rpmdbs; inventories on RHEL-family images are empty until SCAN-RPM-BDB-0146-09 lands.
|
||||||
- OS analyzers lack layer digest/hash attribution; diff/cache outputs may be incorrect until SCAN-OS-FILES-0146-10 lands.
|
- OS analyzers lack layer digest/hash attribution; diff/cache outputs may be incorrect until SCAN-OS-FILES-0146-10 lands.
|
||||||
- Node analyzer emits declared-only packages and lacks Yarn PnP resolution; SBOMs can be inflated or missing real packages until SCAN-NODE-PNP-0146-11 ships.
|
- Node analyzer emits declared-only packages and lacks Yarn PnP resolution; SBOMs can be inflated or missing real packages until SCAN-NODE-PNP-0146-11 ships.
|
||||||
- Python analyzer skips `.egg-info`/editable installs; coverage gap remains until SCAN-PY-EGG-0146-12 ships.
|
- ~~Python analyzer skips `.egg-info`/editable installs; coverage gap remains until SCAN-PY-EGG-0146-12 ships.~~ RESOLVED: EggInfoAdapter shipped.
|
||||||
- Native analyzer lacks call-graph/Unknowns/purl binding; reachability outputs are incomplete until SCAN-NATIVE-REACH-0146-13 finishes.
|
- ~~Native analyzer lacks call-graph/Unknowns/purl binding; reachability outputs are incomplete until SCAN-NATIVE-REACH-0146-13 finishes.~~ RESOLVED: Baseline entry point/PURL/Unknowns structure shipped.
|
||||||
|
|
||||||
## Next Checkpoints
|
## Next Checkpoints
|
||||||
- 2025-12-10: CI runner allocation decision.
|
- 2025-12-10: CI runner allocation decision.
|
||||||
|
|||||||
@@ -76,11 +76,12 @@
|
|||||||
| R4 | CVSS parser/ruleset changes ungoverned (CVM9). | Score drift, audit gaps. | Version parsers/rulesets; DSSE-sign releases; log scorer version in receipts; dual-review changes. |
|
| R4 | CVSS parser/ruleset changes ungoverned (CVM9). | Score drift, audit gaps. | Version parsers/rulesets; DSSE-sign releases; log scorer version in receipts; dual-review changes. |
|
||||||
| R5 | Missing AGENTS for Policy WebService and Concelier ingestion block integration (tasks 8–11). | API/CLI/UI delivery stalled. | AGENTS delivered 2025-12-06 (tasks 15–16). Risk mitigated; monitor API contract approvals. |
|
| R5 | Missing AGENTS for Policy WebService and Concelier ingestion block integration (tasks 8–11). | API/CLI/UI delivery stalled. | AGENTS delivered 2025-12-06 (tasks 15–16). Risk mitigated; monitor API contract approvals. |
|
||||||
| R6 | Policy Engine lacks CVSS receipt endpoints; gateway proxy cannot be implemented yet. | API/CLI/UI tasks remain blocked. | **Mitigated 2025-12-06:** CVSS receipt endpoints implemented in Policy Engine and Gateway; unblock CLI/UI. |
|
| R6 | Policy Engine lacks CVSS receipt endpoints; gateway proxy cannot be implemented yet. | API/CLI/UI tasks remain blocked. | **Mitigated 2025-12-06:** CVSS receipt endpoints implemented in Policy Engine and Gateway; unblock CLI/UI. |
|
||||||
| R7 | System.CommandLine (beta5) API drift versus existing command wiring (SetAction/AddOption/IsRequired) is blocking CLI build despite CVSS verbs implemented. | CLI deliverable cannot be validated; downstream docs/tests stay blocked. | Update handlers to current API or pin to a compatible version and refactor accordingly; CLI Guild. |
|
| R7 | System.CommandLine (beta5) API drift versus existing command wiring (SetAction/AddOption/IsRequired) is blocking CLI build despite CVSS verbs implemented. | CLI deliverable cannot be validated; downstream docs/tests stay blocked. | **Mitigated 2025-12-07:** Migrated CLI to beta5 API surface (Required property, SetAction overloads, option constructors) and cleaned NuGet fallback probing; CLI build (with deps) now succeeds. |
|
||||||
|
|
||||||
## Execution Log
|
## Execution Log
|
||||||
| Date (UTC) | Update | Owner |
|
| Date (UTC) | Update | Owner |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
|
| 2025-12-07 | System.CommandLine beta5 migration completed; CLI cvss verbs build/run with new API surface. NuGet fallback probing fully disabled via repo-local cache; full CLI build (with deps) now succeeds. Risk R7 mitigated. | Implementer |
|
||||||
| 2025-12-07 | Cleared NuGet fallback probing of VS global cache; set repo-local package cache and explicit sources. Shared libraries build; CLI restore now succeeds but System.CommandLine API drift is blocking CLI build and needs follow-up alignment. | Implementer |
|
| 2025-12-07 | Cleared NuGet fallback probing of VS global cache; set repo-local package cache and explicit sources. Shared libraries build; CLI restore now succeeds but System.CommandLine API drift is blocking CLI build and needs follow-up alignment. | Implementer |
|
||||||
| 2025-12-06 | CVSS-CLI-190-010 DONE: added CLI `cvss` verbs (score/show/history/export) targeting Policy Gateway CVSS endpoints; uses local vector parsing and policy hash; JSON export supported. | Implementer |
|
| 2025-12-06 | CVSS-CLI-190-010 DONE: added CLI `cvss` verbs (score/show/history/export) targeting Policy Gateway CVSS endpoints; uses local vector parsing and policy hash; JSON export supported. | Implementer |
|
||||||
| 2025-12-06 | CVSS-API-190-009 DONE: added Policy Engine CVSS receipt endpoints and Gateway proxies (`/api/cvss/receipts`, history, amend, policies); W3 unblocked; risk R6 mitigated. | Implementer |
|
| 2025-12-06 | CVSS-API-190-009 DONE: added Policy Engine CVSS receipt endpoints and Gateway proxies (`/api/cvss/receipts`, history, amend, policies); W3 unblocked; risk R6 mitigated. | Implementer |
|
||||||
|
|||||||
@@ -32,7 +32,7 @@
|
|||||||
| 7 | CONSOLE-VULN-29-001 | BLOCKED (2025-12-04) | WEB-CONSOLE-23-001 shipped 2025-11-28; still waiting for Concelier graph schema snapshot from the 2025-12-03 freeze review before wiring `/console/vuln/*` endpoints. | Console Guild; BE-Base Platform Guild | `/console/vuln/*` workspace endpoints with filters/reachability badges and DTOs once schemas stabilize. |
|
| 7 | CONSOLE-VULN-29-001 | BLOCKED (2025-12-04) | WEB-CONSOLE-23-001 shipped 2025-11-28; still waiting for Concelier graph schema snapshot from the 2025-12-03 freeze review before wiring `/console/vuln/*` endpoints. | Console Guild; BE-Base Platform Guild | `/console/vuln/*` workspace endpoints with filters/reachability badges and DTOs once schemas stabilize. |
|
||||||
| 8 | CONSOLE-VEX-30-001 | BLOCKED (2025-12-04) | Excititor console contract delivered 2025-11-23; remain blocked on VEX Lens spec PLVL0103 + SSE payload validation notes from rescheduled 2025-12-04 alignment. | Console Guild; BE-Base Platform Guild | `/console/vex/events` SSE workspace with validated schemas and samples. |
|
| 8 | CONSOLE-VEX-30-001 | BLOCKED (2025-12-04) | Excititor console contract delivered 2025-11-23; remain blocked on VEX Lens spec PLVL0103 + SSE payload validation notes from rescheduled 2025-12-04 alignment. | Console Guild; BE-Base Platform Guild | `/console/vex/events` SSE workspace with validated schemas and samples. |
|
||||||
| 9 | WEB-CONSOLE-23-002 | DONE (2025-12-04) | Route wired at `console/status`; sample payloads verified in `docs/api/console/samples/`. | BE-Base Platform Guild; Scheduler Guild | `/console/status` polling and `/console/runs/{id}/stream` SSE/WebSocket proxy with queue lag metrics. |
|
| 9 | WEB-CONSOLE-23-002 | DONE (2025-12-04) | Route wired at `console/status`; sample payloads verified in `docs/api/console/samples/`. | BE-Base Platform Guild; Scheduler Guild | `/console/status` polling and `/console/runs/{id}/stream` SSE/WebSocket proxy with queue lag metrics. |
|
||||||
| 10 | WEB-CONSOLE-23-003 | DOING | Contract draft + samples published; client/store/service implemented; unit specs passing locally via Playwright Chromium headless command in Execution Log. | BE-Base Platform Guild; Policy Guild | `/console/exports` POST/GET for evidence bundles, streaming CSV/JSON, checksum manifest, signed attestations. |
|
| 10 | WEB-CONSOLE-23-003 | DONE (2025-12-07) | Contract v0.4 + samples published; client/store/service implemented; targeted exports specs executed locally with CHROME_BIN override (6/6 pass). | BE-Base Platform Guild; Policy Guild | `/console/exports` POST/GET for evidence bundles, streaming CSV/JSON, checksum manifest, signed attestations. |
|
||||||
| 11 | WEB-CONSOLE-23-004 | BLOCKED | Upstream 23-003 blocked; caching/tie-break rules depend on export manifest contract. | BE-Base Platform Guild | `/console/search` fan-out with deterministic ranking and result caps. |
|
| 11 | WEB-CONSOLE-23-004 | BLOCKED | Upstream 23-003 blocked; caching/tie-break rules depend on export manifest contract. | BE-Base Platform Guild | `/console/search` fan-out with deterministic ranking and result caps. |
|
||||||
| 12 | WEB-CONSOLE-23-005 | BLOCKED | Blocked by 23-004; download manifest format and signed metadata not defined. | BE-Base Platform Guild; DevOps Guild | `/console/downloads` manifest (images, charts, offline bundles) with integrity hashes and offline instructions. |
|
| 12 | WEB-CONSOLE-23-005 | BLOCKED | Blocked by 23-004; download manifest format and signed metadata not defined. | BE-Base Platform Guild; DevOps Guild | `/console/downloads` manifest (images, charts, offline bundles) with integrity hashes and offline instructions. |
|
||||||
| 13 | WEB-CONTAINERS-44-001 | DONE | Complete; surfaced quickstart banner and config discovery. | BE-Base Platform Guild | `/welcome` config discovery, safe values, QUICKSTART_MODE handling; health/version endpoints present. |
|
| 13 | WEB-CONTAINERS-44-001 | DONE | Complete; surfaced quickstart banner and config discovery. | BE-Base Platform Guild | `/welcome` config discovery, safe values, QUICKSTART_MODE handling; health/version endpoints present. |
|
||||||
@@ -87,6 +87,8 @@
|
|||||||
## Execution Log
|
## Execution Log
|
||||||
| Date (UTC) | Update | Owner |
|
| Date (UTC) | Update | Owner |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
|
| 2025-12-07 | WEB-CONSOLE-23-003 DONE: ran targeted exports specs locally with CHROME_BIN override and Playwright cache (`node ./node_modules/@angular/cli/bin/ng.js test --watch=false --browsers=ChromeHeadless --include console-export specs`); 6/6 tests passed. | Implementer |
|
||||||
|
| 2025-12-07 | Added `scripts/ci-console-exports.sh` and wired `.gitea/workflows/console-ci.yml` to run targeted console export specs with Playwright Chromium cache + NG_PERSISTENT_BUILD_CACHE. | Implementer |
|
||||||
| 2025-12-07 | Hardened console exports contract to v0.4 in `docs/api/console/workspaces.md`: deterministic manifest ordering, DSSE option, cache/ETag headers, size/item caps, aligned samples (`console-export-manifest.json`). Awaiting Policy/DevOps sign-off. | Project Mgmt |
|
| 2025-12-07 | Hardened console exports contract to v0.4 in `docs/api/console/workspaces.md`: deterministic manifest ordering, DSSE option, cache/ETag headers, size/item caps, aligned samples (`console-export-manifest.json`). Awaiting Policy/DevOps sign-off. | Project Mgmt |
|
||||||
| 2025-12-07 | WEB-CONSOLE-23-003 exports specs green (6/6) using Playwright Chromium 141 headless. Command: `CHROME_BIN=C:\Users\vlindos\AppData\Local\ms-playwright\chromium-1194\chrome-win\chrome.exe STELLAOPS_CHROMIUM_BIN=%CHROME_BIN% NG_PERSISTENT_BUILD_CACHE=1 node ./node_modules/@angular/cli/bin/ng.js test --watch=false --browsers=ChromeHeadlessOffline --progress=false --include src/app/core/api/console-export.client.spec.ts --include src/app/core/console/console-export.store.spec.ts --include src/app/core/console/console-export.service.spec.ts`. Backend export manifest/limits still pending Policy sign-off. | Implementer |
|
| 2025-12-07 | WEB-CONSOLE-23-003 exports specs green (6/6) using Playwright Chromium 141 headless. Command: `CHROME_BIN=C:\Users\vlindos\AppData\Local\ms-playwright\chromium-1194\chrome-win\chrome.exe STELLAOPS_CHROMIUM_BIN=%CHROME_BIN% NG_PERSISTENT_BUILD_CACHE=1 node ./node_modules/@angular/cli/bin/ng.js test --watch=false --browsers=ChromeHeadlessOffline --progress=false --include src/app/core/api/console-export.client.spec.ts --include src/app/core/console/console-export.store.spec.ts --include src/app/core/console/console-export.service.spec.ts`. Backend export manifest/limits still pending Policy sign-off. | Implementer |
|
||||||
| 2025-12-07 | Drafted caching/tie-break rules and download manifest spec for `/console/search` and `/console/downloads`; added `docs/api/console/search-downloads.md` and sample `docs/api/console/samples/console-download-manifest.json`. Awaiting Policy/DevOps sign-off; keeps WEB-CONSOLE-23-004/005 formally BLOCKED until approved. | Project Mgmt |
|
| 2025-12-07 | Drafted caching/tie-break rules and download manifest spec for `/console/search` and `/console/downloads`; added `docs/api/console/search-downloads.md` and sample `docs/api/console/samples/console-download-manifest.json`. Awaiting Policy/DevOps sign-off; keeps WEB-CONSOLE-23-004/005 formally BLOCKED until approved. | Project Mgmt |
|
||||||
|
|||||||
@@ -64,7 +64,7 @@
|
|||||||
| Clear PTY exhaustion on dev host to restore shell access | DevOps Guild | 2025-11-30 | Blocked: `openpty: No space left on device` when starting shells; required before implementation proceeds. |
|
| Clear PTY exhaustion on dev host to restore shell access | DevOps Guild | 2025-11-30 | Blocked: `openpty: No space left on device` when starting shells; required before implementation proceeds. |
|
||||||
| Publish ratified Graph overlay/cache schema snapshot to sprint attachments | Graph Platform Guild | 2025-12-02 | Open |
|
| Publish ratified Graph overlay/cache schema snapshot to sprint attachments | Graph Platform Guild | 2025-12-02 | Open |
|
||||||
| Confirm Export Center streaming/range limits and signed URL policy for gateway | Export Center Guild | 2025-12-03 | Open |
|
| Confirm Export Center streaming/range limits and signed URL policy for gateway | Export Center Guild | 2025-12-03 | Open |
|
||||||
| Provide Export Center profile/run/download/distribution contracts + retention/encryption params; add samples to `docs/api/export-center/`. | Export Center Guild | 2025-12-08 | TODO |
|
| Provide Export Center profile/run/download/distribution contracts + retention/encryption params; add samples to `docs/api/export-center/`. | Export Center Guild | 2025-12-08 | DOING (gateway contract draft v0.9 in `docs/api/gateway/export-center.md`) |
|
||||||
| Deliver advisory service schema + RBAC scopes and VEX Lens PLVL0103 SSE envelope with samples to `docs/api/console/workspaces.md`. | Concelier WebService Guild · VEX Lens Guild | 2025-12-08 | TODO |
|
| Deliver advisory service schema + RBAC scopes and VEX Lens PLVL0103 SSE envelope with samples to `docs/api/console/workspaces.md`. | Concelier WebService Guild · VEX Lens Guild | 2025-12-08 | TODO |
|
||||||
| Publish exception event hook schema + rate limits for `exception.*` notifications. | Platform Events Guild | 2025-12-09 | TODO |
|
| Publish exception event hook schema + rate limits for `exception.*` notifications. | Platform Events Guild | 2025-12-09 | TODO |
|
||||||
|
|
||||||
@@ -93,3 +93,4 @@
|
|||||||
| 2025-12-06 | Added placeholder docs: `docs/api/gateway/export-center.md` (Export Center gateway), `docs/api/graph/overlay-schema.md`, and `docs/api/console/exception-schema.md` to capture required inputs; awaiting owner-provided schemas/fixtures. | Project Mgmt |
|
| 2025-12-06 | Added placeholder docs: `docs/api/gateway/export-center.md` (Export Center gateway), `docs/api/graph/overlay-schema.md`, and `docs/api/console/exception-schema.md` to capture required inputs; awaiting owner-provided schemas/fixtures. | Project Mgmt |
|
||||||
| 2025-12-06 | Added owner draft + samples for overlays and signals: `docs/api/graph/overlay-schema.md` with `samples/overlay-sample.json`; `docs/api/signals/reachability-contract.md` with `samples/callgraph-sample.json` and `facts-sample.json`. | Project Mgmt |
|
| 2025-12-06 | Added owner draft + samples for overlays and signals: `docs/api/graph/overlay-schema.md` with `samples/overlay-sample.json`; `docs/api/signals/reachability-contract.md` with `samples/callgraph-sample.json` and `facts-sample.json`. | Project Mgmt |
|
||||||
| 2025-12-06 | Added ordered unblock plan for Web II (Export Center → Graph overlay → advisory/VEX schemas → shell restore → exception hooks). | Project Mgmt |
|
| 2025-12-06 | Added ordered unblock plan for Web II (Export Center → Graph overlay → advisory/VEX schemas → shell restore → exception hooks). | Project Mgmt |
|
||||||
|
| 2025-12-07 | Drafted Export Center gateway contract v0.9 in `docs/api/gateway/export-center.md` (profiles/run/status/events/distribution, limits, deterministic ordering, DSSE option) to unblock WEB-EXPORT-35/36/37. | Project Mgmt |
|
||||||
|
|||||||
@@ -26,7 +26,7 @@
|
|||||||
| 4 | DEVOPS-CLI-42-001 | DONE (2025-11-24) | DEVOPS-CLI-41-001 | DevOps Guild | CLI golden output tests, parity diff automation, pack run CI harness, remote cache. |
|
| 4 | DEVOPS-CLI-42-001 | DONE (2025-11-24) | DEVOPS-CLI-41-001 | DevOps Guild | CLI golden output tests, parity diff automation, pack run CI harness, remote cache. |
|
||||||
| 5 | DEVOPS-CLI-43-002 | DONE (2025-11-24) | DEVOPS-CLI-43-001 | DevOps Guild; Task Runner Guild | Task Pack chaos smoke in CI; sealed-mode toggle; evidence bundles. |
|
| 5 | DEVOPS-CLI-43-002 | DONE (2025-11-24) | DEVOPS-CLI-43-001 | DevOps Guild; Task Runner Guild | Task Pack chaos smoke in CI; sealed-mode toggle; evidence bundles. |
|
||||||
| 6 | DEVOPS-CLI-43-003 | DONE (2025-11-24) | DEVOPS-CLI-43-002 | DevOps Guild; DevEx/CLI Guild | Integrate CLI golden/parity automation into release gating; publish parity report artifact. |
|
| 6 | DEVOPS-CLI-43-003 | DONE (2025-11-24) | DEVOPS-CLI-43-002 | DevOps Guild; DevEx/CLI Guild | Integrate CLI golden/parity automation into release gating; publish parity report artifact. |
|
||||||
| 7 | DEVOPS-CONSOLE-23-001 | DOING (runner + workflow stub 2025-12-07) | Offline runner spec at `ops/devops/console/README.md`; manual-only CI skeleton at `.gitea/workflows/console-ci.yml` awaiting runner cache bake and console approval. | DevOps Guild; Console Guild | Add console CI workflow with offline runners and artifact retention. |
|
| 7 | DEVOPS-CONSOLE-23-001 | DOING (runner image scaffold 2025-12-07; awaiting bake/test) | Offline runner spec at `ops/devops/console/README.md`; manual-only CI skeleton at `.gitea/workflows/console-ci.yml` awaiting runner cache bake and console approval. | DevOps Guild; Console Guild | Add console CI workflow with offline runners and artifact retention. |
|
||||||
| 8 | DEVOPS-CONSOLE-23-002 | BLOCKED | Depends on DEVOPS-CONSOLE-23-001; prepare build/Helm overlays once CI contract lands. | DevOps Guild; Console Guild | Produce `stella-console` container build + Helm chart overlays with deterministic digests, SBOM/provenance artefacts, offline bundle packaging scripts. |
|
| 8 | DEVOPS-CONSOLE-23-002 | BLOCKED | Depends on DEVOPS-CONSOLE-23-001; prepare build/Helm overlays once CI contract lands. | DevOps Guild; Console Guild | Produce `stella-console` container build + Helm chart overlays with deterministic digests, SBOM/provenance artefacts, offline bundle packaging scripts. |
|
||||||
| 9 | DEVOPS-CONTAINERS-44-001 | DONE (2025-11-24) | — | DevOps Guild | Automate multi-arch image builds with buildx, SBOM generation, cosign signing, CI verification. |
|
| 9 | DEVOPS-CONTAINERS-44-001 | DONE (2025-11-24) | — | DevOps Guild | Automate multi-arch image builds with buildx, SBOM generation, cosign signing, CI verification. |
|
||||||
| 10 | DEVOPS-CONTAINERS-45-001 | DONE (2025-11-24) | DEVOPS-CONTAINERS-44-001 | DevOps Guild | Add Compose/Helm smoke tests (VM + kind), publish artifacts/logs. |
|
| 10 | DEVOPS-CONTAINERS-45-001 | DONE (2025-11-24) | DEVOPS-CONTAINERS-44-001 | DevOps Guild | Add Compose/Helm smoke tests (VM + kind), publish artifacts/logs. |
|
||||||
@@ -41,6 +41,9 @@
|
|||||||
## Execution Log
|
## Execution Log
|
||||||
| Date (UTC) | Update | Owner |
|
| Date (UTC) | Update | Owner |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
|
| 2025-12-07 | Built offline console runner image locally via `ops/devops/console/build-runner-image-ci.sh` (tag `stellaops/console-runner:offline-20251207T131911Z`, tarball at `ops/devops/artifacts/console-runner/console-runner-20251207T131911Z.tar`); ready for runner registration. | DevOps Guild |
|
||||||
|
| 2025-12-07 | Added console runner CI build workflow (`.gitea/workflows/console-runner-image.yml`) and CI wrapper (`ops/devops/console/build-runner-image-ci.sh`) to publish baked runner tarball + metadata. | DevOps Guild |
|
||||||
|
| 2025-12-07 | Added console runner Dockerfile + build helper to bake npm/Playwright caches; README updated with runner image usage. | DevOps Guild |
|
||||||
| 2025-12-07 | Added console offline runner spec (`ops/devops/console/README.md`) and manual-only CI skeleton (`.gitea/workflows/console-ci.yml`); moved DEVOPS-CONSOLE-23-001 to DOING pending runner cache bake/approval. | DevOps Guild |
|
| 2025-12-07 | Added console offline runner spec (`ops/devops/console/README.md`) and manual-only CI skeleton (`.gitea/workflows/console-ci.yml`); moved DEVOPS-CONSOLE-23-001 to DOING pending runner cache bake/approval. | DevOps Guild |
|
||||||
| 2025-12-07 | Added Playwright cache seeding helper (`ops/devops/console/seed_playwright.sh`) to bake Chromium into offline runners; enabled PR triggers in `.gitea/workflows/console-ci.yml` (runner must include seeded cache). | DevOps Guild |
|
| 2025-12-07 | Added Playwright cache seeding helper (`ops/devops/console/seed_playwright.sh`) to bake Chromium into offline runners; enabled PR triggers in `.gitea/workflows/console-ci.yml` (runner must include seeded cache). | DevOps Guild |
|
||||||
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
|
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
|
||||||
|
|||||||
@@ -27,7 +27,7 @@
|
|||||||
| 5 | EIDAS-02 | BLOCKED (2025-12-06) | QSCD device available | Authority · Security | Add QSCD/qualified cert handling and policy checks; certify once hardware available. |
|
| 5 | EIDAS-02 | BLOCKED (2025-12-06) | QSCD device available | Authority · Security | Add QSCD/qualified cert handling and policy checks; certify once hardware available. |
|
||||||
| 6 | KCMVP-01 | DONE (2025-12-07) | None | Security · Crypto | Provide KCMVP hash-only baseline (SHA-256) with labeling; add tests and profile docs. |
|
| 6 | KCMVP-01 | DONE (2025-12-07) | None | Security · Crypto | Provide KCMVP hash-only baseline (SHA-256) with labeling; add tests and profile docs. |
|
||||||
| 7 | KCMVP-02 | BLOCKED (2025-12-06) | Licensed module | Security · Crypto | Add ARIA/SEED/KCDSA provider once certified toolchain available. |
|
| 7 | KCMVP-02 | BLOCKED (2025-12-06) | Licensed module | Security · Crypto | Add ARIA/SEED/KCDSA provider once certified toolchain available. |
|
||||||
| 8 | PQ-IMPL-01 | DOING (2025-12-07) | Registry mapping (R3) to resolve | Crypto · Scanner | Implement `pq-dilithium3` and `pq-falcon512` providers via liboqs/oqs-provider; vendor libs for offline; add deterministic vectors. |
|
| 8 | PQ-IMPL-01 | DONE (2025-12-07) | Registry mapping (R3) to resolve | Crypto · Scanner | Implement `pq-dilithium3` and `pq-falcon512` providers via liboqs/oqs-provider; vendor libs for offline; add deterministic vectors. |
|
||||||
| 9 | PQ-IMPL-02 | TODO | After #8 | Scanner · Attestor · Policy | Wire DSSE signing overrides, dual-sign toggles, deterministic regression tests across providers (Scanner/Attestor/Policy). |
|
| 9 | PQ-IMPL-02 | TODO | After #8 | Scanner · Attestor · Policy | Wire DSSE signing overrides, dual-sign toggles, deterministic regression tests across providers (Scanner/Attestor/Policy). |
|
||||||
| 10 | ROOTPACK-INTL-01 | DOING (2025-12-07) | After baseline tasks (1,4,6,8) | Ops · Docs | Build rootpack variants (us-fips baseline, eu baseline, korea hash-only, PQ addenda) with signed manifests/tests; clearly label certification gaps. |
|
| 10 | ROOTPACK-INTL-01 | DOING (2025-12-07) | After baseline tasks (1,4,6,8) | Ops · Docs | Build rootpack variants (us-fips baseline, eu baseline, korea hash-only, PQ addenda) with signed manifests/tests; clearly label certification gaps. |
|
||||||
|
|
||||||
@@ -36,8 +36,10 @@
|
|||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
| 2025-12-06 | Sprint created; awaiting staffing. | Planning |
|
| 2025-12-06 | Sprint created; awaiting staffing. | Planning |
|
||||||
| 2025-12-06 | Re-scoped: added software baselines (FIPS/eIDAS/KCMVP hash-only, PQ with liboqs) as TODO; certified modules/QSCD/ARIA-SEED remain BLOCKED. | Implementer |
|
| 2025-12-06 | Re-scoped: added software baselines (FIPS/eIDAS/KCMVP hash-only, PQ with liboqs) as TODO; certified modules/QSCD/ARIA-SEED remain BLOCKED. | Implementer |
|
||||||
|
| 2025-12-07 | Implemented software PQ provider (`pq.soft`) with Dilithium3/Falcon512 using BouncyCastle, added unit tests; `UseConcelierTestInfra` disabled for crypto tests to avoid cross-module deps; test suite passing. | Implementer |
|
||||||
| 2025-12-07 | Added software compliance providers (`fips.ecdsa.soft`, `eu.eidas.soft`, `kr.kcmvp.hash`, `pq.soft`) with unit tests; set tasks 1 and 6 to DONE; 2,4,8,10 moved to DOING pending host wiring and certified modules. | Implementer |
|
| 2025-12-07 | Added software compliance providers (`fips.ecdsa.soft`, `eu.eidas.soft`, `kr.kcmvp.hash`, `pq.soft`) with unit tests; set tasks 1 and 6 to DONE; 2,4,8,10 moved to DOING pending host wiring and certified modules. | Implementer |
|
||||||
| 2025-12-07 | Drafted regional rootpacks (`etc/rootpack/us-fips`, `etc/rootpack/eu`, `etc/rootpack/kr`) including PQ soft provider; registry DI registers new providers. | Implementer |
|
| 2025-12-07 | Drafted regional rootpacks (`etc/rootpack/us-fips`, `etc/rootpack/eu`, `etc/rootpack/kr`) including PQ soft provider; registry DI registers new providers. | Implementer |
|
||||||
|
| 2025-12-07 | Added deterministic PQ test vectors (fixed keys/signatures) in `StellaOps.Cryptography.Tests`; PQ-IMPL-01 marked DONE. | Implementer |
|
||||||
|
|
||||||
## Decisions & Risks
|
## Decisions & Risks
|
||||||
- FIPS validation lead time may slip; interim non-certified baseline acceptable but must be clearly labeled until CMVP module lands (task 3).
|
- FIPS validation lead time may slip; interim non-certified baseline acceptable but must be clearly labeled until CMVP module lands (task 3).
|
||||||
|
|||||||
@@ -32,11 +32,11 @@
|
|||||||
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
|
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
|
||||||
| --- | --- | --- | --- | --- | --- |
|
| --- | --- | --- | --- | --- | --- |
|
||||||
| 1 | PG-T7.1.1 | DONE | All phases complete | Infrastructure Guild | Remove `StellaOps.Authority.Storage.Mongo` project |
|
| 1 | PG-T7.1.1 | DONE | All phases complete | Infrastructure Guild | Remove `StellaOps.Authority.Storage.Mongo` project |
|
||||||
| 2 | PG-T7.1.2 | BLOCKED | Needs phased refactor plan; current codebase still references Mongo across Scheduler services/tests | Infrastructure Guild | Remove `StellaOps.Scheduler.Storage.Mongo` project |
|
| 2 | PG-T7.1.2 | TODO | Plan at `docs/db/reports/mongo-removal-plan-20251207.md`; implement Postgres stores then delete Mongo project. | Infrastructure Guild | Remove `StellaOps.Scheduler.Storage.Mongo` project |
|
||||||
| 3 | PG-T7.1.3 | BLOCKED | Needs phased refactor plan; Notify import/backfill hooks still reference Mongo types | Infrastructure Guild | Remove `StellaOps.Notify.Storage.Mongo` project |
|
| 3 | PG-T7.1.3 | TODO | Plan at `docs/db/reports/mongo-removal-plan-20251207.md`; add Postgres notification stores and drop Mongo project. | Infrastructure Guild | Remove `StellaOps.Notify.Storage.Mongo` project |
|
||||||
| 4 | PG-T7.1.4 | BLOCKED | Needs phased refactor plan; Policy Engine Mongo storage still present | Infrastructure Guild | Remove `StellaOps.Policy.Storage.Mongo` project |
|
| 4 | PG-T7.1.4 | TODO | Plan at `docs/db/reports/mongo-removal-plan-20251207.md`; switch Policy to Postgres stores, delete Mongo project. | Infrastructure Guild | Remove `StellaOps.Policy.Storage.Mongo` project |
|
||||||
| 5 | PG-T7.1.5 | BLOCKED | Needs phased refactor plan; Concelier connectors/exporters depend on Mongo types | Infrastructure Guild | Remove `StellaOps.Concelier.Storage.Mongo` project |
|
| 5 | PG-T7.1.5 | TODO | Plan at `docs/db/reports/mongo-removal-plan-20251207.md`; finish Postgres storage, drop Mongo project. | Infrastructure Guild | Remove `StellaOps.Concelier.Storage.Mongo` project |
|
||||||
| 6 | PG-T7.1.6 | BLOCKED | Needs phased refactor plan; Excititor Mongo test harness still referenced | Infrastructure Guild | Remove `StellaOps.Excititor.Storage.Mongo` project |
|
| 6 | PG-T7.1.6 | TODO | Plan at `docs/db/reports/mongo-removal-plan-20251207.md`; replace Mongo test harness with Postgres, delete project. | Infrastructure Guild | Remove `StellaOps.Excititor.Storage.Mongo` project |
|
||||||
| 7 | PG-T7.1.D1 | DONE | Decision recorded 2025-12-06 | Project Mgmt | Decision record to unblock PG-T7.1.2; capture in Execution Log and update Decisions & Risks. |
|
| 7 | PG-T7.1.D1 | DONE | Decision recorded 2025-12-06 | Project Mgmt | Decision record to unblock PG-T7.1.2; capture in Execution Log and update Decisions & Risks. |
|
||||||
| 8 | PG-T7.1.D2 | DONE | Decision recorded 2025-12-06 | Project Mgmt | Decision record to unblock PG-T7.1.3; capture in Execution Log and update Decisions & Risks. |
|
| 8 | PG-T7.1.D2 | DONE | Decision recorded 2025-12-06 | Project Mgmt | Decision record to unblock PG-T7.1.3; capture in Execution Log and update Decisions & Risks. |
|
||||||
| 9 | PG-T7.1.D3 | DONE | Decision recorded 2025-12-06 | Project Mgmt | Decision record to unblock PG-T7.1.4; capture in Execution Log and update Decisions & Risks. |
|
| 9 | PG-T7.1.D3 | DONE | Decision recorded 2025-12-06 | Project Mgmt | Decision record to unblock PG-T7.1.4; capture in Execution Log and update Decisions & Risks. |
|
||||||
@@ -104,6 +104,7 @@
|
|||||||
## Execution Log
|
## Execution Log
|
||||||
| Date (UTC) | Update | Owner |
|
| Date (UTC) | Update | Owner |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
|
| 2025-12-07 | Unblocked PG-T7.1.2T7.1.6 with plan at `docs/db/reports/mongo-removal-plan-20251207.md`; statuses set to TODO. | Project Mgmt |
|
||||||
| 2025-12-03 | Added Wave Coordination (A code removal, B archive, C performance, D docs, E air-gap kit; sequential). No status changes. | StellaOps Agent |
|
| 2025-12-03 | Added Wave Coordination (A code removal, B archive, C performance, D docs, E air-gap kit; sequential). No status changes. | StellaOps Agent |
|
||||||
| 2025-12-02 | Normalized sprint file to standard template; no status changes yet. | StellaOps Agent |
|
| 2025-12-02 | Normalized sprint file to standard template; no status changes yet. | StellaOps Agent |
|
||||||
| 2025-12-06 | Wave A kickoff: PG-T7.1.1 set to DOING; confirming module cutovers done; prep removal checklist and impact scan. | Project Mgmt |
|
| 2025-12-06 | Wave A kickoff: PG-T7.1.1 set to DOING; confirming module cutovers done; prep removal checklist and impact scan. | Project Mgmt |
|
||||||
@@ -125,8 +126,13 @@
|
|||||||
| 2025-12-06 | PG-T7.1.5b set to DOING; began wiring Postgres document store (DI registration, repository find) to replace Mongo bindings. | Concelier Guild |
|
| 2025-12-06 | PG-T7.1.5b set to DOING; began wiring Postgres document store (DI registration, repository find) to replace Mongo bindings. | Concelier Guild |
|
||||||
| 2025-12-06 | Concelier shim extended: MongoCompat now carries merge events/alias constants; Postgres storage DI uses PostgresDocumentStore; Source repository lookup fixed; Merge + Storage.Postgres projects now build. Full solution still hits pre-existing NU1608 version conflicts in crypto plugins (out of Concelier scope). | Concelier Guild |
|
| 2025-12-06 | Concelier shim extended: MongoCompat now carries merge events/alias constants; Postgres storage DI uses PostgresDocumentStore; Source repository lookup fixed; Merge + Storage.Postgres projects now build. Full solution still hits pre-existing NU1608 version conflicts in crypto plugins (out of Concelier scope). | Concelier Guild |
|
||||||
| 2025-12-07 | Concelier Postgres store now also implements legacy `IAdvisoryStore` and is registered as such; DI updated. Added repo-wide restore fallback suppression to unblock Postgres storage build (plugin/provenance now restore without VS fallback path). Storage.Postgres builds clean; remaining full-solution build blockers are crypto NU1608 version constraints (out of scope here). | Concelier Guild |
|
| 2025-12-07 | Concelier Postgres store now also implements legacy `IAdvisoryStore` and is registered as such; DI updated. Added repo-wide restore fallback suppression to unblock Postgres storage build (plugin/provenance now restore without VS fallback path). Storage.Postgres builds clean; remaining full-solution build blockers are crypto NU1608 version constraints (out of scope here). | Concelier Guild |
|
||||||
|
| 2025-12-07 | Postgres raw/state wiring: RawDocumentStorage now scoped with DocumentStore fallback, connectors/exporters persist payload bytes with GUID payload IDs, Postgres source-state adapter registered, and DualWrite advisory store now Postgres-only. Full WebService build still red on result-type aliases and legacy Mongo bootstrap hooks; follow-up needed before PG-T7.1.5b can close. | Concelier Guild |
|
||||||
|
| 2025-12-07 | NuGet cache reset and restore retry: cleared locals into `.nuget/packages.clean`, restored Concelier solution with fallback disabled, and reran build. Restore now clean; build failing on Mongo shim namespace ambiguity (Documents/Dtos aliases), missing WebService result wrapper types, and remaining Mongo bootstrap hooks. | Concelier Guild |
|
||||||
|
| 2025-12-07 | Cached Microsoft.Extensions.* 10.0.0 packages locally and refactored WebService result aliases/Mongo bootstrap bypass; `StellaOps.Concelier.WebService` now builds green against Postgres-only DI. | Concelier Guild |
|
||||||
|
| 2025-12-07 | Full `StellaOps.Concelier.sln` build still red: MongoCompat `DocumentStatuses` conflicts with Connector.Common, compat Bson stubs lack BinaryData/Elements/GetValue/IsBsonNull, `DtoRecord` fields immutable, JpFlag store types missing, and Concelier.Testing + SourceState tests still depend on Mongo driver/AddMongoStorage. PG-T7.1.5c remains TODO pending compat shim or Postgres fixture migration. | Concelier Guild |
|
||||||
|
|
||||||
## Decisions & Risks
|
## Decisions & Risks
|
||||||
|
- BLOCKER: Concelier solution build remains red: MongoCompat `DocumentStatuses` clashes with Connector.Common, Bson stubs miss BinaryData/Elements/GetValue/IsBsonNull, `DtoRecord` lacks mutable schema fields, JpFlag store types absent, and Concelier.Testing/SourceState tests still depend on Mongo driver/AddMongoStorage. PG-T7.1.5c must land compat shim or Postgres fixtures before deleting Storage.Mongo.
|
||||||
- Cleanup is strictly after all phases complete; do not start T7 tasks until module cutovers are DONE.
|
- Cleanup is strictly after all phases complete; do not start T7 tasks until module cutovers are DONE.
|
||||||
- Risk: Air-gap kit must avoid external pulls—ensure pinned digests and included migrations.
|
- Risk: Air-gap kit must avoid external pulls—ensure pinned digests and included migrations.
|
||||||
- BLOCKER: Concelier has pervasive Mongo references (connectors, exporters, tests, docs). Requires phased refactor plan (PG-T7.1.PLAN) before deletion to avoid breaking build.
|
- BLOCKER: Concelier has pervasive Mongo references (connectors, exporters, tests, docs). Requires phased refactor plan (PG-T7.1.PLAN) before deletion to avoid breaking build.
|
||||||
|
|||||||
@@ -572,7 +572,7 @@
|
|||||||
| DEVOPS-CLI-42-001 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild (ops/devops) | ops/devops | Add CLI golden output tests, parity diff automation, pack run CI harness, and artifact cache for remote mode. Dependencies: DEVOPS-CLI-41-001. | — | DVDO0102 |
|
| DEVOPS-CLI-42-001 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild (ops/devops) | ops/devops | Add CLI golden output tests, parity diff automation, pack run CI harness, and artifact cache for remote mode. Dependencies: DEVOPS-CLI-41-001. | — | DVDO0102 |
|
||||||
| DEVOPS-CLI-43-002 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild, Task Runner Guild (ops/devops) | ops/devops | Implement Task Pack chaos smoke in CI (random failure injection, resume, sealed-mode toggle) and publish evidence bundles for review. Dependencies: DEVOPS-CLI-43-001. | — | DVDO0102 |
|
| DEVOPS-CLI-43-002 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild, Task Runner Guild (ops/devops) | ops/devops | Implement Task Pack chaos smoke in CI (random failure injection, resume, sealed-mode toggle) and publish evidence bundles for review. Dependencies: DEVOPS-CLI-43-001. | — | DVDO0102 |
|
||||||
| DEVOPS-CLI-43-003 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild, DevEx/CLI Guild (ops/devops) | ops/devops | Integrate CLI golden output/parity diff automation into release gating; export parity report artifact consumed by Console Downloads workspace. Dependencies: DEVOPS-CLI-43-002. | — | DVDO0102 |
|
| DEVOPS-CLI-43-003 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild, DevEx/CLI Guild (ops/devops) | ops/devops | Integrate CLI golden output/parity diff automation into release gating; export parity report artifact consumed by Console Downloads workspace. Dependencies: DEVOPS-CLI-43-002. | — | DVDO0102 |
|
||||||
| DEVOPS-CONSOLE-23-001 | DOING (runner+PR 2025-12-07) | 2025-12-07 | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild · Console Guild | ops/devops | Offline runner spec + Playwright seeding helper; console CI now PR-triggered (`.gitea/workflows/console-ci.yml`) assuming runner image has baked cache. | Needs runner cache bake | DVDO0104 |
|
| DEVOPS-CONSOLE-23-001 | DOING (runner+PR 2025-12-07) | 2025-12-07 | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild · Console Guild | ops/devops | Offline runner spec + Playwright seeding helper; console CI now PR-triggered (`.gitea/workflows/console-ci.yml`) assuming runner image has baked cache. | Needs runner cache bake | DVDO0104 |
|
||||||
| DEVOPS-CONSOLE-23-002 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild | ops/devops | Produce `stella-console` container build + Helm chart overlays with deterministic digests, SBOM/provenance artefacts, and offline bundle packaging scripts. Dependencies: DEVOPS-CONSOLE-23-001. | Depends on #2 | DVDO0104 |
|
| DEVOPS-CONSOLE-23-002 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild | ops/devops | Produce `stella-console` container build + Helm chart overlays with deterministic digests, SBOM/provenance artefacts, and offline bundle packaging scripts. Dependencies: DEVOPS-CONSOLE-23-001. | Depends on #2 | DVDO0104 |
|
||||||
| DEVOPS-CONTAINERS-44-001 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild | ops/devops | Automate multi-arch image builds with buildx, SBOM generation, cosign signing, and signature verification in CI. | Wait for COWB0101 base image | DVDO0104 |
|
| DEVOPS-CONTAINERS-44-001 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild | ops/devops | Automate multi-arch image builds with buildx, SBOM generation, cosign signing, and signature verification in CI. | Wait for COWB0101 base image | DVDO0104 |
|
||||||
| DEVOPS-CONTAINERS-45-001 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild | ops/devops | Add Compose and Helm smoke tests (fresh VM + kind cluster) to CI; publish test artifacts and logs. Dependencies: DEVOPS-CONTAINERS-44-001. | Depends on #4 | DVDO0104 |
|
| DEVOPS-CONTAINERS-45-001 | TODO | | SPRINT_0504_0001_0001_ops_devops_ii | DevOps Guild | ops/devops | Add Compose and Helm smoke tests (fresh VM + kind cluster) to CI; publish test artifacts and logs. Dependencies: DEVOPS-CONTAINERS-44-001. | Depends on #4 | DVDO0104 |
|
||||||
@@ -1979,8 +1979,8 @@
|
|||||||
| UI-AOC-19-002 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Implement violation drill-down view highlighting offending document fields and provenance metadata. Dependencies: UI-AOC-19-001. | | |
|
| UI-AOC-19-002 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Implement violation drill-down view highlighting offending document fields and provenance metadata. Dependencies: UI-AOC-19-001. | | |
|
||||||
| UI-AOC-19-003 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Add "Verify last 24h" action triggering AOC verifier endpoint and surfacing CLI parity guidance. Dependencies: UI-AOC-19-002. | | |
|
| UI-AOC-19-003 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Add "Verify last 24h" action triggering AOC verifier endpoint and surfacing CLI parity guidance. Dependencies: UI-AOC-19-002. | | |
|
||||||
| UI-CLI-401-007 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | UI & CLI Guilds (`src/Cli/StellaOps.Cli`, `src/UI/StellaOps.UI`) | `src/Cli/StellaOps.Cli`, `src/UI/StellaOps.UI` | Implement CLI `stella graph explain` + UI explain drawer showing signed call-path, predicates, runtime hits, and DSSE pointers; include counterfactual controls. | | |
|
| UI-CLI-401-007 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | UI & CLI Guilds (`src/Cli/StellaOps.Cli`, `src/UI/StellaOps.UI`) | `src/Cli/StellaOps.Cli`, `src/UI/StellaOps.UI` | Implement CLI `stella graph explain` + UI explain drawer showing signed call-path, predicates, runtime hits, and DSSE pointers; include counterfactual controls. | | |
|
||||||
| UI-DOCS-0001 | TODO | | SPRINT_331_docs_modules_ui | Docs Guild (docs/modules/ui) | docs/modules/ui | | | |
|
| UI-DOCS-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_331_docs_modules_ui | Docs Guild (docs/modules/ui) | docs/modules/ui | | | |
|
||||||
| UI-ENG-0001 | TODO | | SPRINT_331_docs_modules_ui | Module Team (docs/modules/ui) | docs/modules/ui | | | |
|
| UI-ENG-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_331_docs_modules_ui | Module Team (docs/modules/ui) | docs/modules/ui | | | |
|
||||||
| UI-ENTROPY-40-001 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Visualise entropy analysis per image (layer donut, file heatmaps, """Why risky?""" chips) in Vulnerability Explorer and scan details, including opaque byte ratios and detector hints (see `docs/modules/scanner/entropy.md`). | | |
|
| UI-ENTROPY-40-001 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Visualise entropy analysis per image (layer donut, file heatmaps, """Why risky?""" chips) in Vulnerability Explorer and scan details, including opaque byte ratios and detector hints (see `docs/modules/scanner/entropy.md`). | | |
|
||||||
| UI-ENTROPY-40-002 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild, Policy Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Add policy banners/tooltips explaining entropy penalties (block/warn thresholds, mitigation steps) and link to raw `entropy.report.json` evidence downloads (`docs/modules/scanner/entropy.md`). Dependencies: UI-ENTROPY-40-001. | | |
|
| UI-ENTROPY-40-002 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild, Policy Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Add policy banners/tooltips explaining entropy penalties (block/warn thresholds, mitigation steps) and link to raw `entropy.report.json` evidence downloads (`docs/modules/scanner/entropy.md`). Dependencies: UI-ENTROPY-40-001. | | |
|
||||||
| UI-EXC-25-001 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild, Governance Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Build Exception Center (list + kanban) with filters, sorting, workflow transitions, and audit views. | | |
|
| UI-EXC-25-001 | TODO | | SPRINT_0209_0001_0001_ui_i | UI Guild, Governance Guild (src/UI/StellaOps.UI) | src/UI/StellaOps.UI | Build Exception Center (list + kanban) with filters, sorting, workflow transitions, and audit views. | | |
|
||||||
@@ -1998,7 +1998,7 @@
|
|||||||
| UI-LNM-22-002 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement filters (source, severity bucket, conflict-only, CVSS vector presence) and pagination/lazy loading for large linksets. Docs depend on finalized filtering UX. Dependencies: UI-LNM-22-001. | | |
|
| UI-LNM-22-002 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement filters (source, severity bucket, conflict-only, CVSS vector presence) and pagination/lazy loading for large linksets. Docs depend on finalized filtering UX. Dependencies: UI-LNM-22-001. | | |
|
||||||
| UI-LNM-22-003 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild, Excititor Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add VEX tab with status/justification summaries, conflict indicators, and export actions. Required for `DOCS-LNM-22-005` coverage of VEX evidence tab. Dependencies: UI-LNM-22-002. | | |
|
| UI-LNM-22-003 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild, Excititor Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add VEX tab with status/justification summaries, conflict indicators, and export actions. Required for `DOCS-LNM-22-005` coverage of VEX evidence tab. Dependencies: UI-LNM-22-002. | | |
|
||||||
| UI-LNM-22-004 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Provide permalink + copy-to-clipboard for selected component/linkset/policy combination; ensure high-contrast theme support. Dependencies: UI-LNM-22-003. | | |
|
| UI-LNM-22-004 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Provide permalink + copy-to-clipboard for selected component/linkset/policy combination; ensure high-contrast theme support. Dependencies: UI-LNM-22-003. | | |
|
||||||
| UI-OPS-0001 | TODO | | SPRINT_331_docs_modules_ui | Ops Guild (docs/modules/ui) | docs/modules/ui | | | |
|
| UI-OPS-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_331_docs_modules_ui | Ops Guild (docs/modules/ui) | docs/modules/ui | | | |
|
||||||
| UI-ORCH-32-001 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild, Console Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Update Console RBAC mappings to surface `Orch.Viewer`, request `orch:read` scope in token flows, and gate dashboard access/messaging accordingly. | | |
|
| UI-ORCH-32-001 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild, Console Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Update Console RBAC mappings to surface `Orch.Viewer`, request `orch:read` scope in token flows, and gate dashboard access/messaging accordingly. | | |
|
||||||
| UI-POLICY-13-007 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. | | |
|
| UI-POLICY-13-007 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. | | |
|
||||||
| UI-POLICY-20-001 | DONE | 2025-12-05 | SPRINT_0210_0001_0002_ui_ii | UI Guild | src/Web/StellaOps.Web | Ship Monaco-based policy editor with DSL syntax highlighting, diagnostics, and checklist sidebar. | POLICY-13-007 | UIPD0101 |
|
| UI-POLICY-20-001 | DONE | 2025-12-05 | SPRINT_0210_0001_0002_ui_ii | UI Guild | src/Web/StellaOps.Web | Ship Monaco-based policy editor with DSL syntax highlighting, diagnostics, and checklist sidebar. | POLICY-13-007 | UIPD0101 |
|
||||||
@@ -2118,9 +2118,9 @@
|
|||||||
| WEB-EXC-25-001 | BLOCKED | 2025-12-06 | SPRINT_0212_0001_0001_web_i | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement `/exceptions` API (create, propose, approve, revoke, list, history) with validation, pagination, and audit logging. | | Draft placeholder docs+sample added (`docs/api/console/exception-schema.md`, `docs/api/console/samples/exception-schema-sample.json`); awaiting official schema/scopes/audit rules. |
|
| WEB-EXC-25-001 | BLOCKED | 2025-12-06 | SPRINT_0212_0001_0001_web_i | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement `/exceptions` API (create, propose, approve, revoke, list, history) with validation, pagination, and audit logging. | | Draft placeholder docs+sample added (`docs/api/console/exception-schema.md`, `docs/api/console/samples/exception-schema-sample.json`); awaiting official schema/scopes/audit rules. |
|
||||||
| WEB-EXC-25-002 | BLOCKED | 2025-11-30 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Extend `/policy/effective` and `/policy/simulate` responses to include exception metadata and accept overrides for simulations. Dependencies: WEB-EXC-25-001. | | |
|
| WEB-EXC-25-002 | BLOCKED | 2025-11-30 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Extend `/policy/effective` and `/policy/simulate` responses to include exception metadata and accept overrides for simulations. Dependencies: WEB-EXC-25-001. | | |
|
||||||
| WEB-EXC-25-003 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, Platform Events Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Publish `exception.*` events, integrate with notification hooks, enforce rate limits. Dependencies: WEB-EXC-25-002. | | |
|
| WEB-EXC-25-003 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, Platform Events Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Publish `exception.*` events, integrate with notification hooks, enforce rate limits. Dependencies: WEB-EXC-25-002. | | |
|
||||||
| WEB-EXPORT-35-001 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Surface Export Center APIs (profiles/runs/download) through gateway with tenant scoping, streaming support, and viewer/operator scope checks. | | |
|
| WEB-EXPORT-35-001 | BLOCKED | 2025-12-07 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Surface Export Center APIs (profiles/runs/download) through gateway with tenant scoping, streaming support, and viewer/operator scope checks. | Gateway contract draft v0.9 in docs/api/gateway/export-center.md; waiting guild sign-off | |
|
||||||
| WEB-EXPORT-36-001 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add distribution routes (OCI/object storage), manifest/provenance proxies, and signed URL generation. Dependencies: WEB-EXPORT-35-001. | | |
|
| WEB-EXPORT-36-001 | BLOCKED | 2025-12-07 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add distribution routes (OCI/object storage), manifest/provenance proxies, and signed URL generation. Dependencies: WEB-EXPORT-35-001. | Blocked by 35-001; distro signing/limits pending same contract | |
|
||||||
| WEB-EXPORT-37-001 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Expose scheduling, retention, encryption parameters, and verification endpoints with admin scope enforcement and audit logs. Dependencies: WEB-EXPORT-36-001. | | |
|
| WEB-EXPORT-37-001 | BLOCKED | 2025-12-07 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Expose scheduling, retention, encryption parameters, and verification endpoints with admin scope enforcement and audit logs. Dependencies: WEB-EXPORT-36-001. | Blocked by 36-001; retention/encryption params not frozen | |
|
||||||
| WEB-GRAPH-21-001 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, Graph Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add gateway routes for graph versions/viewport/node/path/diff/export endpoints with tenant enforcement, scope checks, and streaming responses; proxy Policy Engine diff toggles without inline logic. Adopt `StellaOpsScopes` constants for RBAC enforcement. | | |
|
| WEB-GRAPH-21-001 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, Graph Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add gateway routes for graph versions/viewport/node/path/diff/export endpoints with tenant enforcement, scope checks, and streaming responses; proxy Policy Engine diff toggles without inline logic. Adopt `StellaOpsScopes` constants for RBAC enforcement. | | |
|
||||||
| WEB-GRAPH-21-002 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement bbox/zoom/path parameter validation, pagination tokens, and deterministic ordering; add contract tests for boundary conditions. Dependencies: WEB-GRAPH-21-001. | | |
|
| WEB-GRAPH-21-002 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement bbox/zoom/path parameter validation, pagination tokens, and deterministic ordering; add contract tests for boundary conditions. Dependencies: WEB-GRAPH-21-001. | | |
|
||||||
| WEB-GRAPH-21-003 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, QA Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Map graph service errors to `ERR_Graph_*`, support GraphML/JSONL export streaming, and document rate limits. Dependencies: WEB-GRAPH-21-002. | | |
|
| WEB-GRAPH-21-003 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, QA Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Map graph service errors to `ERR_Graph_*`, support GraphML/JSONL export streaming, and document rate limits. Dependencies: WEB-GRAPH-21-002. | | |
|
||||||
@@ -2203,11 +2203,11 @@
|
|||||||
| WORKER-PY-33-002 | DONE | | SPRINT_0153_0001_0003_orchestrator_iii | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python) | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python | Provide error classification/backoff helper mapping to orchestrator codes, including jittered retries and structured failure reports. Dependencies: WORKER-PY-33-001. | | |
|
| WORKER-PY-33-002 | DONE | | SPRINT_0153_0001_0003_orchestrator_iii | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python) | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python | Provide error classification/backoff helper mapping to orchestrator codes, including jittered retries and structured failure reports. Dependencies: WORKER-PY-33-001. | | |
|
||||||
| WORKER-PY-34-001 | DONE | | SPRINT_0153_0001_0003_orchestrator_iii | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python) | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python | Implement backfill range iteration, watermark handshake, and artifact dedupe verification utilities for Python workers. Dependencies: WORKER-PY-33-002. | | |
|
| WORKER-PY-34-001 | DONE | | SPRINT_0153_0001_0003_orchestrator_iii | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python) | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python | Implement backfill range iteration, watermark handshake, and artifact dedupe verification utilities for Python workers. Dependencies: WORKER-PY-33-002. | | |
|
||||||
| ZAS-002 | TODO | | SPRINT_400_runtime_facts_static_callgraph_union | Zastava Observer Guild (`src/Zastava/StellaOps.Zastava.Observer`, `docs/modules/zastava/architecture.md`, `docs/reachability/function-level-evidence.md`) | `src/Zastava/StellaOps.Zastava.Observer`, `docs/modules/zastava/architecture.md`, `docs/reachability/function-level-evidence.md` | | | |
|
| ZAS-002 | TODO | | SPRINT_400_runtime_facts_static_callgraph_union | Zastava Observer Guild (`src/Zastava/StellaOps.Zastava.Observer`, `docs/modules/zastava/architecture.md`, `docs/reachability/function-level-evidence.md`) | `src/Zastava/StellaOps.Zastava.Observer`, `docs/modules/zastava/architecture.md`, `docs/reachability/function-level-evidence.md` | | | |
|
||||||
| ZASTAVA-DOCS-0001 | TODO | | SPRINT_335_docs_modules_zastava | Docs Guild (docs/modules/zastava) | docs/modules/zastava | See ./AGENTS.md | | |
|
| ZASTAVA-DOCS-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_335_docs_modules_zastava | Docs Guild (docs/modules/zastava) | docs/modules/zastava | See ./AGENTS.md | | |
|
||||||
| ZASTAVA-ENG-0001 | TODO | | SPRINT_335_docs_modules_zastava | Module Team (docs/modules/zastava) | docs/modules/zastava | Update status via ./AGENTS.md workflow | | |
|
| ZASTAVA-ENG-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_335_docs_modules_zastava | Module Team (docs/modules/zastava) | docs/modules/zastava | Update status via ./AGENTS.md workflow | | |
|
||||||
| ZASTAVA-ENV-01 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Observer adoption of Surface.Env helpers paused while Surface.FS cache contract finalizes. | | |
|
| ZASTAVA-ENV-01 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Observer adoption of Surface.Env helpers paused while Surface.FS cache contract finalizes. | | |
|
||||||
| ZASTAVA-ENV-02 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Webhook helper migration follows ENV-01 completion. | | |
|
| ZASTAVA-ENV-02 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Webhook helper migration follows ENV-01 completion. | | |
|
||||||
| ZASTAVA-OPS-0001 | TODO | | SPRINT_335_docs_modules_zastava | Ops Guild (docs/modules/zastava) | docs/modules/zastava | Sync outcomes back to ../.. | | |
|
| ZASTAVA-OPS-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_335_docs_modules_zastava | Ops Guild (docs/modules/zastava) | docs/modules/zastava | Sync outcomes back to ../.. | | |
|
||||||
| ZASTAVA-REACH-201-001 | TODO | | SPRINT_400_runtime_facts_static_callgraph_union | Zastava Observer Guild (`src/Zastava/StellaOps.Zastava.Observer`) | `src/Zastava/StellaOps.Zastava.Observer` | Implement runtime symbol sampling in `StellaOps.Zastava.Observer` (EntryTrace-aware shell AST + build-id capture) and stream ND-JSON batches to Signals `/runtime-facts`, including CAS pointers for traces. Update runbook + config references. | | |
|
| ZASTAVA-REACH-201-001 | TODO | | SPRINT_400_runtime_facts_static_callgraph_union | Zastava Observer Guild (`src/Zastava/StellaOps.Zastava.Observer`) | `src/Zastava/StellaOps.Zastava.Observer` | Implement runtime symbol sampling in `StellaOps.Zastava.Observer` (EntryTrace-aware shell AST + build-id capture) and stream ND-JSON batches to Signals `/runtime-facts`, including CAS pointers for traces. Update runbook + config references. | | |
|
||||||
| ZASTAVA-SECRETS-01 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Surface.Secrets wiring for Observer pending published cache endpoints. | | |
|
| ZASTAVA-SECRETS-01 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Surface.Secrets wiring for Observer pending published cache endpoints. | | |
|
||||||
| ZASTAVA-SECRETS-02 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Webhook secret retrieval cascades from SECRETS-01 work. | | |
|
| ZASTAVA-SECRETS-02 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Webhook secret retrieval cascades from SECRETS-01 work. | | |
|
||||||
@@ -4171,12 +4171,12 @@
|
|||||||
| TIMELINE-OBS-53-001 | TODO | | SPRINT_160_export_evidence | Timeline Indexer + Evidence Locker Guilds | | Timeline Indexer + Evidence Locker Guilds | | |
|
| TIMELINE-OBS-53-001 | TODO | | SPRINT_160_export_evidence | Timeline Indexer + Evidence Locker Guilds | | Timeline Indexer + Evidence Locker Guilds | | |
|
||||||
| UI-401-027 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | UI Guild · CLI Guild (`src/UI/StellaOps.UI`, `src/Cli/StellaOps.Cli`, `docs/uncertainty/README.md`) | `src/UI/StellaOps.UI`, `src/Cli/StellaOps.Cli`, `docs/uncertainty/README.md` | | | |
|
| UI-401-027 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | UI Guild · CLI Guild (`src/UI/StellaOps.UI`, `src/Cli/StellaOps.Cli`, `docs/uncertainty/README.md`) | `src/UI/StellaOps.UI`, `src/Cli/StellaOps.Cli`, `docs/uncertainty/README.md` | | | |
|
||||||
| UI-CLI-401-007 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | UI & CLI Guilds (`src/Cli/StellaOps.Cli`, `src/UI/StellaOps.UI`) | `src/Cli/StellaOps.Cli`, `src/UI/StellaOps.UI` | Implement CLI `stella graph explain` + UI explain drawer showing signed call-path, predicates, runtime hits, and DSSE pointers; include counterfactual controls. | | |
|
| UI-CLI-401-007 | TODO | | SPRINT_0401_0001_0001_reachability_evidence_chain | UI & CLI Guilds (`src/Cli/StellaOps.Cli`, `src/UI/StellaOps.UI`) | `src/Cli/StellaOps.Cli`, `src/UI/StellaOps.UI` | Implement CLI `stella graph explain` + UI explain drawer showing signed call-path, predicates, runtime hits, and DSSE pointers; include counterfactual controls. | | |
|
||||||
| UI-DOCS-0001 | TODO | | SPRINT_331_docs_modules_ui | Docs Guild (docs/modules/ui) | docs/modules/ui | | | |
|
| UI-DOCS-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_331_docs_modules_ui | Docs Guild (docs/modules/ui) | docs/modules/ui | | | |
|
||||||
| UI-ENG-0001 | TODO | | SPRINT_331_docs_modules_ui | Module Team (docs/modules/ui) | docs/modules/ui | | | |
|
| UI-ENG-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_331_docs_modules_ui | Module Team (docs/modules/ui) | docs/modules/ui | | | |
|
||||||
| UI-LNM-22-002 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement filters (source, severity bucket, conflict-only, CVSS vector presence) and pagination/lazy loading for large linksets. Docs depend on finalized filtering UX. Dependencies: UI-LNM-22-001. | | |
|
| UI-LNM-22-002 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement filters (source, severity bucket, conflict-only, CVSS vector presence) and pagination/lazy loading for large linksets. Docs depend on finalized filtering UX. Dependencies: UI-LNM-22-001. | | |
|
||||||
| UI-LNM-22-003 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild, Excititor Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add VEX tab with status/justification summaries, conflict indicators, and export actions. Required for `DOCS-LNM-22-005` coverage of VEX evidence tab. Dependencies: UI-LNM-22-002. | | |
|
| UI-LNM-22-003 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild, Excititor Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add VEX tab with status/justification summaries, conflict indicators, and export actions. Required for `DOCS-LNM-22-005` coverage of VEX evidence tab. Dependencies: UI-LNM-22-002. | | |
|
||||||
| UI-LNM-22-004 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Provide permalink + copy-to-clipboard for selected component/linkset/policy combination; ensure high-contrast theme support. Dependencies: UI-LNM-22-003. | | |
|
| UI-LNM-22-004 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Provide permalink + copy-to-clipboard for selected component/linkset/policy combination; ensure high-contrast theme support. Dependencies: UI-LNM-22-003. | | |
|
||||||
| UI-OPS-0001 | TODO | | SPRINT_331_docs_modules_ui | Ops Guild (docs/modules/ui) | docs/modules/ui | | | |
|
| UI-OPS-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_331_docs_modules_ui | Ops Guild (docs/modules/ui) | docs/modules/ui | | | |
|
||||||
| UI-ORCH-32-001 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild, Console Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Update Console RBAC mappings to surface `Orch.Viewer`, request `orch:read` scope in token flows, and gate dashboard access/messaging accordingly. | | |
|
| UI-ORCH-32-001 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild, Console Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Update Console RBAC mappings to surface `Orch.Viewer`, request `orch:read` scope in token flows, and gate dashboard access/messaging accordingly. | | |
|
||||||
| UI-POLICY-13-007 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. | | |
|
| UI-POLICY-13-007 | DONE | 2025-12-04 | SPRINT_0210_0001_0002_ui_ii | UI Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. | | |
|
||||||
| UI-POLICY-20-001 | DONE | 2025-12-05 | SPRINT_0210_0001_0002_ui_ii | UI Guild | src/Web/StellaOps.Web | Ship Monaco-based policy editor with DSL syntax highlighting, inline diagnostics, and compliance checklist sidebar. Dependencies: UI-POLICY-13-007. | Depends on Policy DSL schema | |
|
| UI-POLICY-20-001 | DONE | 2025-12-05 | SPRINT_0210_0001_0002_ui_ii | UI Guild | src/Web/StellaOps.Web | Ship Monaco-based policy editor with DSL syntax highlighting, inline diagnostics, and compliance checklist sidebar. Dependencies: UI-POLICY-13-007. | Depends on Policy DSL schema | |
|
||||||
@@ -4293,9 +4293,9 @@
|
|||||||
| WEB-EXC-25-001 | BLOCKED | 2025-12-06 | SPRINT_0212_0001_0001_web_i | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement `/exceptions` API (create, propose, approve, revoke, list, history) with validation, pagination, and audit logging. | | Waiting on exception schema + policy scopes and audit requirements. |
|
| WEB-EXC-25-001 | BLOCKED | 2025-12-06 | SPRINT_0212_0001_0001_web_i | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement `/exceptions` API (create, propose, approve, revoke, list, history) with validation, pagination, and audit logging. | | Waiting on exception schema + policy scopes and audit requirements. |
|
||||||
| WEB-EXC-25-002 | BLOCKED | 2025-11-30 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Extend `/policy/effective` and `/policy/simulate` responses to include exception metadata and accept overrides for simulations. Dependencies: WEB-EXC-25-001. | | |
|
| WEB-EXC-25-002 | BLOCKED | 2025-11-30 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Extend `/policy/effective` and `/policy/simulate` responses to include exception metadata and accept overrides for simulations. Dependencies: WEB-EXC-25-001. | | |
|
||||||
| WEB-EXC-25-003 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, Platform Events Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Publish `exception.*` events, integrate with notification hooks, enforce rate limits. Dependencies: WEB-EXC-25-002. | | |
|
| WEB-EXC-25-003 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, Platform Events Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Publish `exception.*` events, integrate with notification hooks, enforce rate limits. Dependencies: WEB-EXC-25-002. | | |
|
||||||
| WEB-EXPORT-35-001 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Surface Export Center APIs (profiles/runs/download) through gateway with tenant scoping, streaming support, and viewer/operator scope checks. | | |
|
| WEB-EXPORT-35-001 | BLOCKED | 2025-12-07 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Surface Export Center APIs (profiles/runs/download) through gateway with tenant scoping, streaming support, and viewer/operator scope checks. | Gateway contract draft v0.9 in docs/api/gateway/export-center.md; waiting guild sign-off | |
|
||||||
| WEB-EXPORT-36-001 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add distribution routes (OCI/object storage), manifest/provenance proxies, and signed URL generation. Dependencies: WEB-EXPORT-35-001. | | |
|
| WEB-EXPORT-36-001 | BLOCKED | 2025-12-07 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add distribution routes (OCI/object storage), manifest/provenance proxies, and signed URL generation. Dependencies: WEB-EXPORT-35-001. | Blocked by 35-001; distro signing/limits pending same contract | |
|
||||||
| WEB-EXPORT-37-001 | TODO | | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Expose scheduling, retention, encryption parameters, and verification endpoints with admin scope enforcement and audit logs. Dependencies: WEB-EXPORT-36-001. | | |
|
| WEB-EXPORT-37-001 | BLOCKED | 2025-12-07 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Expose scheduling, retention, encryption parameters, and verification endpoints with admin scope enforcement and audit logs. Dependencies: WEB-EXPORT-36-001. | Blocked by 36-001; retention/encryption params not frozen | |
|
||||||
| WEB-GRAPH-21-001 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, Graph Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add gateway routes for graph versions/viewport/node/path/diff/export endpoints with tenant enforcement, scope checks, and streaming responses; proxy Policy Engine diff toggles without inline logic. Adopt `StellaOpsScopes` constants for RBAC enforcement. | | |
|
| WEB-GRAPH-21-001 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, Graph Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Add gateway routes for graph versions/viewport/node/path/diff/export endpoints with tenant enforcement, scope checks, and streaming responses; proxy Policy Engine diff toggles without inline logic. Adopt `StellaOpsScopes` constants for RBAC enforcement. | | |
|
||||||
| WEB-GRAPH-21-002 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement bbox/zoom/path parameter validation, pagination tokens, and deterministic ordering; add contract tests for boundary conditions. Dependencies: WEB-GRAPH-21-001. | | |
|
| WEB-GRAPH-21-002 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Implement bbox/zoom/path parameter validation, pagination tokens, and deterministic ordering; add contract tests for boundary conditions. Dependencies: WEB-GRAPH-21-001. | | |
|
||||||
| WEB-GRAPH-21-003 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, QA Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Map graph service errors to `ERR_Graph_*`, support GraphML/JSONL export streaming, and document rate limits. Dependencies: WEB-GRAPH-21-002. | | |
|
| WEB-GRAPH-21-003 | BLOCKED | 2025-10-27 | SPRINT_0213_0001_0002_web_ii | BE-Base Platform Guild, QA Guild (src/Web/StellaOps.Web) | src/Web/StellaOps.Web | Map graph service errors to `ERR_Graph_*`, support GraphML/JSONL export streaming, and document rate limits. Dependencies: WEB-GRAPH-21-002. | | |
|
||||||
@@ -4378,11 +4378,11 @@
|
|||||||
| WORKER-PY-33-002 | DONE | | SPRINT_0153_0001_0003_orchestrator_iii | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python) | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python | Provide error classification/backoff helper mapping to orchestrator codes, including jittered retries and structured failure reports. Dependencies: WORKER-PY-33-001. | | |
|
| WORKER-PY-33-002 | DONE | | SPRINT_0153_0001_0003_orchestrator_iii | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python) | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python | Provide error classification/backoff helper mapping to orchestrator codes, including jittered retries and structured failure reports. Dependencies: WORKER-PY-33-001. | | |
|
||||||
| WORKER-PY-34-001 | DONE | | SPRINT_0153_0001_0003_orchestrator_iii | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python) | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python | Implement backfill range iteration, watermark handshake, and artifact dedupe verification utilities for Python workers. Dependencies: WORKER-PY-33-002. | | |
|
| WORKER-PY-34-001 | DONE | | SPRINT_0153_0001_0003_orchestrator_iii | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python) | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python | Implement backfill range iteration, watermark handshake, and artifact dedupe verification utilities for Python workers. Dependencies: WORKER-PY-33-002. | | |
|
||||||
| ZAS-002 | TODO | | SPRINT_400_runtime_facts_static_callgraph_union | Zastava Observer Guild (`src/Zastava/StellaOps.Zastava.Observer`, `docs/modules/zastava/architecture.md`, `docs/reachability/function-level-evidence.md`) | `src/Zastava/StellaOps.Zastava.Observer`, `docs/modules/zastava/architecture.md`, `docs/reachability/function-level-evidence.md` | | | |
|
| ZAS-002 | TODO | | SPRINT_400_runtime_facts_static_callgraph_union | Zastava Observer Guild (`src/Zastava/StellaOps.Zastava.Observer`, `docs/modules/zastava/architecture.md`, `docs/reachability/function-level-evidence.md`) | `src/Zastava/StellaOps.Zastava.Observer`, `docs/modules/zastava/architecture.md`, `docs/reachability/function-level-evidence.md` | | | |
|
||||||
| ZASTAVA-DOCS-0001 | TODO | | SPRINT_335_docs_modules_zastava | Docs Guild (docs/modules/zastava) | docs/modules/zastava | See ./AGENTS.md | | |
|
| ZASTAVA-DOCS-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_335_docs_modules_zastava | Docs Guild (docs/modules/zastava) | docs/modules/zastava | See ./AGENTS.md | | |
|
||||||
| ZASTAVA-ENG-0001 | TODO | | SPRINT_335_docs_modules_zastava | Module Team (docs/modules/zastava) | docs/modules/zastava | Update status via ./AGENTS.md workflow | | |
|
| ZASTAVA-ENG-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_335_docs_modules_zastava | Module Team (docs/modules/zastava) | docs/modules/zastava | Update status via ./AGENTS.md workflow | | |
|
||||||
| ZASTAVA-ENV-01 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Observer adoption of Surface.Env helpers paused while Surface.FS cache contract finalizes. | | |
|
| ZASTAVA-ENV-01 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Observer adoption of Surface.Env helpers paused while Surface.FS cache contract finalizes. | | |
|
||||||
| ZASTAVA-ENV-02 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Webhook helper migration follows ENV-01 completion. | | |
|
| ZASTAVA-ENV-02 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Webhook helper migration follows ENV-01 completion. | | |
|
||||||
| ZASTAVA-OPS-0001 | TODO | | SPRINT_335_docs_modules_zastava | Ops Guild (docs/modules/zastava) | docs/modules/zastava | Sync outcomes back to ../.. | | |
|
| ZASTAVA-OPS-0001 | DONE (2025-11-30) | 2025-11-30 | SPRINT_335_docs_modules_zastava | Ops Guild (docs/modules/zastava) | docs/modules/zastava | Sync outcomes back to ../.. | | |
|
||||||
| ZASTAVA-REACH-201-001 | TODO | | SPRINT_400_runtime_facts_static_callgraph_union | Zastava Observer Guild (`src/Zastava/StellaOps.Zastava.Observer`) | `src/Zastava/StellaOps.Zastava.Observer` | Implement runtime symbol sampling in `StellaOps.Zastava.Observer` (EntryTrace-aware shell AST + build-id capture) and stream ND-JSON batches to Signals `/runtime-facts`, including CAS pointers for traces. Update runbook + config references. | | |
|
| ZASTAVA-REACH-201-001 | TODO | | SPRINT_400_runtime_facts_static_callgraph_union | Zastava Observer Guild (`src/Zastava/StellaOps.Zastava.Observer`) | `src/Zastava/StellaOps.Zastava.Observer` | Implement runtime symbol sampling in `StellaOps.Zastava.Observer` (EntryTrace-aware shell AST + build-id capture) and stream ND-JSON batches to Signals `/runtime-facts`, including CAS pointers for traces. Update runbook + config references. | | |
|
||||||
| ZASTAVA-SECRETS-01 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Surface.Secrets wiring for Observer pending published cache endpoints. | | |
|
| ZASTAVA-SECRETS-01 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Surface.Secrets wiring for Observer pending published cache endpoints. | | |
|
||||||
| ZASTAVA-SECRETS-02 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Webhook secret retrieval cascades from SECRETS-01 work. | | |
|
| ZASTAVA-SECRETS-02 | TODO | | SPRINT_0140_0001_0001_runtime_signals | | | Webhook secret retrieval cascades from SECRETS-01 work. | | |
|
||||||
|
|||||||
@@ -1,76 +1,82 @@
|
|||||||
# Concelier Backfill & Rollback Plan (STORE-AOC-19-005-DEV)
|
# Concelier Backfill & Rollback Plan (STORE-AOC-19-005-DEV, Postgres)
|
||||||
|
|
||||||
## Objective
|
## Objective
|
||||||
Prepare and rehearse the raw-linkset backfill/rollback so Concelier Mongo reflects Link-Not-Merge data deterministically across dev/stage. This runbook unblocks STORE-AOC-19-005-DEV.
|
Prepare and rehearse the raw Link-Not-Merge backfill/rollback so Concelier Postgres reflects the dataset deterministically across dev/stage. This replaces the prior Mongo workflow.
|
||||||
|
|
||||||
## Inputs
|
## Inputs
|
||||||
- Source dataset: staging export tarball `linksets-stage-backfill.tar.zst`.
|
- Dataset tarball: `out/linksets/linksets-stage-backfill.tar.zst`
|
||||||
- Expected placement: `out/linksets/linksets-stage-backfill.tar.zst`.
|
- Files expected inside: `linksets.ndjson`, `advisory_chunks.ndjson`, `manifest.json`
|
||||||
- Hash: record SHA-256 in this file once available (example below).
|
- Record SHA-256 of the tarball here when staged:
|
||||||
|
```
|
||||||
Example hash capture (replace with real):
|
$ sha256sum out/linksets/linksets-stage-backfill.tar.zst
|
||||||
```
|
2b43ef9b5694f59be8c1d513893c506b8d1b8de152d820937178070bfc00d0c0 out/linksets/linksets-stage-backfill.tar.zst
|
||||||
$ sha256sum out/linksets/linksets-stage-backfill.tar.zst
|
```
|
||||||
3ac7d1c8f4f7b5c5b27c1c7ac6d6e9b2a2d6d7a1a1c3f4e5b6c7d8e9f0a1b2c3 out/linksets/linksets-stage-backfill.tar.zst
|
- To regenerate the tarball deterministically from repo seeds: `./scripts/concelier/build-store-aoc-19-005-dataset.sh`
|
||||||
```
|
- To validate a tarball locally (counts + hashes): `./scripts/concelier/test-store-aoc-19-005-dataset.sh out/linksets/linksets-stage-backfill.tar.zst`
|
||||||
|
|
||||||
## Preflight
|
## Preflight
|
||||||
- Environment variables:
|
- Env:
|
||||||
- `CONCELIER_MONGO_URI` pointing to the target (dev or staging) Mongo.
|
- `PGURI` (or `CONCELIER_PG_URI`) pointing to the target Postgres instance.
|
||||||
- `CONCELIER_DB` (default `concelier`).
|
- `PGSCHEMA` (default `lnm_raw`) for staging tables.
|
||||||
- Take a snapshot of affected collections:
|
- Ensure maintenance window for bulk import; no concurrent writers to staging tables.
|
||||||
```
|
|
||||||
mongodump --uri "$CONCELIER_MONGO_URI" --db "$CONCELIER_DB" --collection linksets --collection advisory_chunks --out out/backups/pre-run
|
|
||||||
```
|
|
||||||
- Ensure write lock is acceptable for the maintenance window.
|
|
||||||
|
|
||||||
## Backfill steps
|
## Backfill steps (CI-ready)
|
||||||
|
|
||||||
|
### Preferred: CI/manual script
|
||||||
|
- `scripts/concelier/backfill-store-aoc-19-005.sh /path/to/linksets-stage-backfill.tar.zst`
|
||||||
|
- Env: `PGURI` (or `CONCELIER_PG_URI`), optional `PGSCHEMA` (default `lnm_raw`), optional `DRY_RUN=1` for extraction-only.
|
||||||
|
- The script:
|
||||||
|
- Extracts and validates required files.
|
||||||
|
- Creates/clears staging tables (`<schema>.linksets_raw`, `<schema>.advisory_chunks_raw`).
|
||||||
|
- Imports via `\copy` from TSV derived with `jq -rc '[._id, .] | @tsv'`.
|
||||||
|
- Prints counts and echoes the manifest.
|
||||||
|
|
||||||
|
### Manual steps (fallback)
|
||||||
1) Extract dataset:
|
1) Extract dataset:
|
||||||
```
|
```
|
||||||
mkdir -p out/linksets/extracted
|
mkdir -p out/linksets/extracted
|
||||||
tar -xf out/linksets/linksets-stage-backfill.tar.zst -C out/linksets/extracted
|
tar -xf out/linksets/linksets-stage-backfill.tar.zst -C out/linksets/extracted
|
||||||
```
|
```
|
||||||
2) Import linksets + chunks (bypass validation to preserve upstream IDs):
|
2) Create/truncate staging tables and import:
|
||||||
```
|
```
|
||||||
mongoimport --uri "$CONCELIER_MONGO_URI" --db "$CONCELIER_DB" \
|
psql "$PGURI" <<SQL
|
||||||
--collection linksets --file out/linksets/extracted/linksets.ndjson --mode=upsert --upsertFields=_id
|
create schema if not exists lnm_raw;
|
||||||
|
create table if not exists lnm_raw.linksets_raw (id text primary key, raw jsonb not null);
|
||||||
mongoimport --uri "$CONCELIER_MONGO_URI" --db "$CONCELIER_DB" \
|
create table if not exists lnm_raw.advisory_chunks_raw (id text primary key, raw jsonb not null);
|
||||||
--collection advisory_chunks --file out/linksets/extracted/advisory_chunks.ndjson --mode=upsert --upsertFields=_id
|
truncate table lnm_raw.linksets_raw;
|
||||||
|
truncate table lnm_raw.advisory_chunks_raw;
|
||||||
|
\copy lnm_raw.linksets_raw (id, raw) from program 'jq -rc ''[._id, .] | @tsv'' out/linksets/extracted/linksets.ndjson' with (format csv, delimiter E'\\t', quote '\"', escape '\"');
|
||||||
|
\copy lnm_raw.advisory_chunks_raw (id, raw) from program 'jq -rc ''[._id, .] | @tsv'' out/linksets/extracted/advisory_chunks.ndjson' with (format csv, delimiter E'\\t', quote '\"', escape '\"');
|
||||||
|
SQL
|
||||||
```
|
```
|
||||||
3) Verify counts vs manifest:
|
3) Verify counts vs manifest:
|
||||||
```
|
```
|
||||||
jq '.' out/linksets/extracted/manifest.json
|
jq '.' out/linksets/extracted/manifest.json
|
||||||
mongo --quiet "$CONCELIER_MONGO_URI/$CONCELIER_DB" --eval "db.linksets.countDocuments()"
|
psql -tA "$PGURI" -c "select 'linksets_raw='||count(*) from lnm_raw.linksets_raw;"
|
||||||
mongo --quiet "$CONCELIER_MONGO_URI/$CONCELIER_DB" --eval "db.advisory_chunks.countDocuments()"
|
psql -tA "$PGURI" -c "select 'advisory_chunks_raw='||count(*) from lnm_raw.advisory_chunks_raw;"
|
||||||
```
|
|
||||||
4) Dry-run rollback marker (no-op unless `ENABLE_ROLLBACK=1` set):
|
|
||||||
```
|
|
||||||
ENABLE_ROLLBACK=0 python scripts/concelier/backfill/rollback.py --manifest out/linksets/extracted/manifest.json
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Rollback procedure
|
## Rollback procedure
|
||||||
- If validation fails, restore from preflight dump:
|
- If validation fails: `truncate table lnm_raw.linksets_raw; truncate table lnm_raw.advisory_chunks_raw;` then rerun import.
|
||||||
```
|
- Promotion to production tables should be gated by a separate migration/ETL step; keep staging isolated.
|
||||||
mongorestore --uri "$CONCELIER_MONGO_URI" --drop out/backups/pre-run
|
|
||||||
```
|
|
||||||
- If partial write detected, rerun mongoimport for the affected collection only with `--mode=upsert`.
|
|
||||||
|
|
||||||
## Validation checklist
|
## Validation checklist
|
||||||
- Hash of tarball matches recorded SHA-256.
|
- Tarball SHA-256 recorded above.
|
||||||
- Post-import counts align with `manifest.json`.
|
- Counts align with `manifest.json`.
|
||||||
- Linkset cursor pagination smoke test:
|
- API smoke test (Postgres-backed): `dotnet test src/Concelier/StellaOps.Concelier.WebService.Tests --filter LinksetsEndpoint_SupportsCursorPagination` (against Postgres config).
|
||||||
```
|
- Optional: compare sample rows between staging and expected downstream tables.
|
||||||
dotnet test src/Concelier/StellaOps.Concelier.WebService.Tests --filter LinksetsEndpoint_SupportsCursorPagination
|
|
||||||
```
|
|
||||||
- Storage metrics (if enabled) show non-zero `concelier_storage_import_total` for this window.
|
|
||||||
|
|
||||||
## Artefacts to record
|
## Artefacts to record
|
||||||
- Tarball SHA-256 and size.
|
- Tarball SHA-256 and size.
|
||||||
- `manifest.json` copy stored alongside tarball.
|
- `manifest.json` copy alongside tarball.
|
||||||
- Import log (`out/linksets/import.log`) and validation results.
|
- Import log (capture script output) and validation results.
|
||||||
- Decision: maintenance window and rollback outcome.
|
- Decision: maintenance window and rollback outcome.
|
||||||
|
|
||||||
|
## How to produce the tarball (export from Postgres)
|
||||||
|
- Use `scripts/concelier/export-linksets-tarball.sh out/linksets/linksets-stage-backfill.tar.zst`.
|
||||||
|
- Env: `PGURI` (or `CONCELIER_PG_URI`), optional `PGSCHEMA`, `LINKSETS_TABLE`, `CHUNKS_TABLE`.
|
||||||
|
- The script exports `linksets` and `advisory_chunks` tables to NDJSON, generates `manifest.json`, builds the tarball, and prints the SHA-256.
|
||||||
|
|
||||||
## Owners
|
## Owners
|
||||||
- Concelier Storage Guild (Mongo)
|
- Concelier Storage Guild (Postgres)
|
||||||
- AirGap/Backfill reviewers for sign-off
|
- AirGap/Backfill reviewers for sign-off
|
||||||
|
|||||||
@@ -104,6 +104,7 @@ HMAC operations use purpose-based selection similar to hashing:
|
|||||||
- **FIPS / eIDAS**: Software allow-lists (`fips.ecdsa.soft`, `eu.eidas.soft`) enforce ES256/ES384 + SHA-2. They are labeled non-certified until a CMVP/QSCD module is supplied.
|
- **FIPS / eIDAS**: Software allow-lists (`fips.ecdsa.soft`, `eu.eidas.soft`) enforce ES256/ES384 + SHA-2. They are labeled non-certified until a CMVP/QSCD module is supplied.
|
||||||
- **KCMVP**: Hash-only baseline (`kr.kcmvp.hash`) keeps SHA-256 available when ARIA/SEED/KCDSA hardware is absent.
|
- **KCMVP**: Hash-only baseline (`kr.kcmvp.hash`) keeps SHA-256 available when ARIA/SEED/KCDSA hardware is absent.
|
||||||
- **PQ (Dilithium3/Falcon512)**: Software-only `pq.soft` provider using BouncyCastle PQC primitives; gated by `PQ_SOFT_ALLOWED=1`. Certified PQ hardware is not yet available.
|
- **PQ (Dilithium3/Falcon512)**: Software-only `pq.soft` provider using BouncyCastle PQC primitives; gated by `PQ_SOFT_ALLOWED=1`. Certified PQ hardware is not yet available.
|
||||||
|
Deterministic test vectors live in `src/__Libraries/StellaOps.Cryptography.Tests/PqSoftCryptoProviderTests.cs` (fixed seeds/keys) and `etc/rootpack/pq-vectors.txt` for offline verification.
|
||||||
|
|
||||||
## Interoperability Exceptions
|
## Interoperability Exceptions
|
||||||
|
|
||||||
|
|||||||
3
etc/rootpack/pq-vectors.txt
Normal file
3
etc/rootpack/pq-vectors.txt
Normal file
File diff suppressed because one or more lines are too long
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"run_id": "20251207T131911Z",
|
||||||
|
"image_tag": "stellaops/console-runner:offline-20251207T131911Z",
|
||||||
|
"image_id": "sha256:39049b927c85ca8ae7cae79939fb36d2fa3a7ca04fb82220ef6b339b704cc0e3",
|
||||||
|
"repo_digest": "stellaops/console-runner@sha256:39049b927c85ca8ae7cae79939fb36d2fa3a7ca04fb82220ef6b339b704cc0e3",
|
||||||
|
"output_tar": "ops/devops/artifacts/console-runner/console-runner-20251207T131911Z.tar"
|
||||||
|
}
|
||||||
Binary file not shown.
38
ops/devops/console/Dockerfile.runner
Normal file
38
ops/devops/console/Dockerfile.runner
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# syntax=docker/dockerfile:1.7
|
||||||
|
# Offline-friendly console CI runner image with pre-baked npm and Playwright caches (DEVOPS-CONSOLE-23-001)
|
||||||
|
ARG BASE_IMAGE=node:20-bookworm-slim
|
||||||
|
ARG APP_DIR=src/Web/StellaOps.Web
|
||||||
|
ARG SOURCE_DATE_EPOCH=1704067200
|
||||||
|
|
||||||
|
FROM ${BASE_IMAGE}
|
||||||
|
|
||||||
|
ENV DEBIAN_FRONTEND=noninteractive \
|
||||||
|
NPM_CONFIG_FUND=false \
|
||||||
|
NPM_CONFIG_AUDIT=false \
|
||||||
|
NPM_CONFIG_PROGRESS=false \
|
||||||
|
SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH} \
|
||||||
|
PLAYWRIGHT_BROWSERS_PATH=/home/node/.cache/ms-playwright \
|
||||||
|
NPM_CONFIG_CACHE=/home/node/.npm \
|
||||||
|
CI=true
|
||||||
|
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends git ca-certificates dumb-init wget curl && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
WORKDIR /tmp/console-seed
|
||||||
|
COPY ${APP_DIR}/package.json ${APP_DIR}/package-lock.json ./
|
||||||
|
|
||||||
|
ENV npm_config_cache=/tmp/npm-cache
|
||||||
|
RUN npm ci --cache ${npm_config_cache} --prefer-offline --no-audit --progress=false --ignore-scripts && \
|
||||||
|
PLAYWRIGHT_BROWSERS_PATH=/tmp/ms-playwright npx playwright install chromium --with-deps && \
|
||||||
|
rm -rf node_modules
|
||||||
|
|
||||||
|
RUN install -d -o node -g node /home/node/.npm /home/node/.cache && \
|
||||||
|
mv /tmp/npm-cache /home/node/.npm && \
|
||||||
|
mv /tmp/ms-playwright /home/node/.cache/ms-playwright && \
|
||||||
|
chown -R node:node /home/node/.npm /home/node/.cache
|
||||||
|
|
||||||
|
WORKDIR /workspace
|
||||||
|
USER node
|
||||||
|
ENTRYPOINT ["/usr/bin/dumb-init","--"]
|
||||||
|
CMD ["/bin/bash"]
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
# Console CI runner (offline-friendly)
|
# Console CI runner (offline-friendly)
|
||||||
|
|
||||||
Status: runner spec + CI now wired to PRs; ensure runner image includes pre-baked Playwright cache before enabling broad PR traffic.
|
Status: runner spec + CI now wired to PRs; runner image scaffold + CI build workflow now available with baked npm + Playwright cache.
|
||||||
|
|
||||||
## Runner profile
|
## Runner profile
|
||||||
- OS: Ubuntu 22.04 LTS (x86_64) with Docker available for Playwright deps if needed.
|
- OS: Ubuntu 22.04 LTS (x86_64) with Docker available for Playwright deps if needed.
|
||||||
@@ -24,7 +24,15 @@ Status: runner spec + CI now wired to PRs; ensure runner image includes pre-bake
|
|||||||
- Do not hit external registries during CI; rely on pre-seeded npm mirror or cached tarballs. Runner image should contain npm cache prime. If mirror is used, set `NPM_CONFIG_REGISTRY=https://registry.npmjs.org` equivalent mirror URL inside the runner; default pipeline does not hard-code it.
|
- Do not hit external registries during CI; rely on pre-seeded npm mirror or cached tarballs. Runner image should contain npm cache prime. If mirror is used, set `NPM_CONFIG_REGISTRY=https://registry.npmjs.org` equivalent mirror URL inside the runner; default pipeline does not hard-code it.
|
||||||
- Playwright browsers must be pre-baked; the workflow will not download them.
|
- Playwright browsers must be pre-baked; the workflow will not download them.
|
||||||
|
|
||||||
### Seeding Playwright cache (one-time per runner image)
|
### Runner image (with baked caches)
|
||||||
|
- Dockerfile: `ops/devops/console/Dockerfile.runner` (Node 20, npm cache, Playwright Chromium cache). Builds with `npm ci` + `playwright install chromium --with-deps` during the image build.
|
||||||
|
- Build locally: `IMAGE_TAG=stellaops/console-runner:offline OUTPUT_TAR=ops/devops/artifacts/console-runner/console-runner.tar ops/devops/console/build-runner-image.sh`
|
||||||
|
- `OUTPUT_TAR` optional; when set, the script saves the image for airgap transport.
|
||||||
|
- Runner expectations: `NPM_CONFIG_CACHE=~/.npm`, `PLAYWRIGHT_BROWSERS_PATH=~/.cache/ms-playwright` (paths already baked). Register the runner with a label (e.g., `console-ci`) and point `.gitea/workflows/console-ci.yml` at that runner pool.
|
||||||
|
- CI build helper: `ops/devops/console/build-runner-image-ci.sh` wraps the build, sets a run-scoped tag, emits metadata JSON, and saves a tarball under `ops/devops/artifacts/console-runner/`.
|
||||||
|
- CI workflow: `.gitea/workflows/console-runner-image.yml` (manual + path-trigger) builds the runner image and uploads the tarball + metadata as an artifact named `console-runner-image-<run_id>`.
|
||||||
|
|
||||||
|
### Seeding Playwright cache (one-time per runner image, host-based option)
|
||||||
```bash
|
```bash
|
||||||
ops/devops/console/seed_playwright.sh
|
ops/devops/console/seed_playwright.sh
|
||||||
# then bake ~/.cache/ms-playwright into the runner image or mount it on the agent
|
# then bake ~/.cache/ms-playwright into the runner image or mount it on the agent
|
||||||
@@ -33,3 +41,4 @@ ops/devops/console/seed_playwright.sh
|
|||||||
## How to run
|
## How to run
|
||||||
- PR-triggered via `.gitea/workflows/console-ci.yml`; restrict runners to images with baked Playwright cache.
|
- PR-triggered via `.gitea/workflows/console-ci.yml`; restrict runners to images with baked Playwright cache.
|
||||||
- Manual `workflow_dispatch` remains available for dry runs or cache updates.
|
- Manual `workflow_dispatch` remains available for dry runs or cache updates.
|
||||||
|
- To refresh the runner image, run the `console-runner-image` workflow or execute `ops/devops/console/build-runner-image-ci.sh` locally to generate a tarball and metadata for distribution.
|
||||||
|
|||||||
44
ops/devops/console/build-runner-image-ci.sh
Executable file
44
ops/devops/console/build-runner-image-ci.sh
Executable file
@@ -0,0 +1,44 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# CI-friendly wrapper to build the console runner image with baked npm/Playwright caches
|
||||||
|
# and emit a tarball + metadata for offline distribution.
|
||||||
|
#
|
||||||
|
# Inputs (env):
|
||||||
|
# RUN_ID : unique run identifier (default: $GITHUB_RUN_ID or UTC timestamp)
|
||||||
|
# IMAGE_TAG : optional override of image tag (default: stellaops/console-runner:offline-$RUN_ID)
|
||||||
|
# OUTPUT_TAR : optional override of tarball path (default: ops/devops/artifacts/console-runner/console-runner-$RUN_ID.tar)
|
||||||
|
# APP_DIR : optional override of app directory (default: src/Web/StellaOps.Web)
|
||||||
|
|
||||||
|
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." && pwd)"
|
||||||
|
RUN_ID="${RUN_ID:-${GITHUB_RUN_ID:-$(date -u +%Y%m%dT%H%M%SZ)}}"
|
||||||
|
APP_DIR="${APP_DIR:-src/Web/StellaOps.Web}"
|
||||||
|
IMAGE_TAG="${IMAGE_TAG:-stellaops/console-runner:offline-$RUN_ID}"
|
||||||
|
OUTPUT_TAR="${OUTPUT_TAR:-$ROOT/ops/devops/artifacts/console-runner/console-runner-$RUN_ID.tar}"
|
||||||
|
META_DIR="$(dirname "$OUTPUT_TAR")"
|
||||||
|
META_JSON="$META_DIR/console-runner-$RUN_ID.json"
|
||||||
|
|
||||||
|
mkdir -p "$META_DIR"
|
||||||
|
|
||||||
|
IMAGE_TAG="$IMAGE_TAG" OUTPUT_TAR="$OUTPUT_TAR" APP_DIR="$APP_DIR" "$ROOT/ops/devops/console/build-runner-image.sh"
|
||||||
|
|
||||||
|
digest="$(docker image inspect --format='{{index .RepoDigests 0}}' "$IMAGE_TAG" || true)"
|
||||||
|
id="$(docker image inspect --format='{{.Id}}' "$IMAGE_TAG" || true)"
|
||||||
|
|
||||||
|
cat > "$META_JSON" <<EOF
|
||||||
|
{
|
||||||
|
"run_id": "$RUN_ID",
|
||||||
|
"image_tag": "$IMAGE_TAG",
|
||||||
|
"image_id": "$id",
|
||||||
|
"repo_digest": "$digest",
|
||||||
|
"output_tar": "$(python - <<PY
|
||||||
|
import os, sys
|
||||||
|
print(os.path.relpath("$OUTPUT_TAR","$ROOT"))
|
||||||
|
PY
|
||||||
|
)"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "Built $IMAGE_TAG"
|
||||||
|
echo "Saved tarball: $OUTPUT_TAR"
|
||||||
|
echo "Metadata: $META_JSON"
|
||||||
29
ops/devops/console/build-runner-image.sh
Executable file
29
ops/devops/console/build-runner-image.sh
Executable file
@@ -0,0 +1,29 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Builds the offline console CI runner image with baked npm/Playwright caches.
|
||||||
|
# IMAGE_TAG: docker tag to produce (default: stellaops/console-runner:offline)
|
||||||
|
# OUTPUT_TAR: optional path to save the image tarball for airgap use.
|
||||||
|
|
||||||
|
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." && pwd)"
|
||||||
|
IMAGE_TAG=${IMAGE_TAG:-stellaops/console-runner:offline}
|
||||||
|
DOCKERFILE=${DOCKERFILE:-ops/devops/console/Dockerfile.runner}
|
||||||
|
APP_DIR=${APP_DIR:-src/Web/StellaOps.Web}
|
||||||
|
OUTPUT_TAR=${OUTPUT_TAR:-}
|
||||||
|
|
||||||
|
if ! command -v docker >/dev/null 2>&1; then
|
||||||
|
echo "docker not found; install Docker/Podman before building the runner image." >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
docker build -f "$ROOT/$DOCKERFILE" --build-arg APP_DIR="$APP_DIR" -t "$IMAGE_TAG" "$ROOT"
|
||||||
|
|
||||||
|
if [[ -n "$OUTPUT_TAR" ]]; then
|
||||||
|
mkdir -p "$(dirname "$OUTPUT_TAR")"
|
||||||
|
docker save "$IMAGE_TAG" -o "$OUTPUT_TAR"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Runner image built: $IMAGE_TAG"
|
||||||
|
if [[ -n "$OUTPUT_TAR" ]]; then
|
||||||
|
echo "Saved tarball: $OUTPUT_TAR"
|
||||||
|
fi
|
||||||
114
ops/wine-csp/tests/docker-test.sh
Normal file
114
ops/wine-csp/tests/docker-test.sh
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Wine CSP Docker Build and Test
|
||||||
|
#
|
||||||
|
# Builds the Wine CSP Docker image and runs the full test suite.
|
||||||
|
# This script is designed for local development and CI/CD pipelines.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./docker-test.sh # Build and test
|
||||||
|
# ./docker-test.sh --no-build # Test existing image
|
||||||
|
# ./docker-test.sh --push # Build, test, and push if tests pass
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
IMAGE_NAME="${WINE_CSP_IMAGE:-wine-csp}"
|
||||||
|
IMAGE_TAG="${WINE_CSP_TAG:-test}"
|
||||||
|
FULL_IMAGE="${IMAGE_NAME}:${IMAGE_TAG}"
|
||||||
|
DOCKERFILE="${PROJECT_ROOT}/ops/wine-csp/Dockerfile"
|
||||||
|
|
||||||
|
DO_BUILD=true
|
||||||
|
DO_PUSH=false
|
||||||
|
VERBOSE=false
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
--no-build)
|
||||||
|
DO_BUILD=false
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--push)
|
||||||
|
DO_PUSH=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--verbose|-v)
|
||||||
|
VERBOSE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--image)
|
||||||
|
IMAGE_NAME="$2"
|
||||||
|
FULL_IMAGE="${IMAGE_NAME}:${IMAGE_TAG}"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--tag)
|
||||||
|
IMAGE_TAG="$2"
|
||||||
|
FULL_IMAGE="${IMAGE_NAME}:${IMAGE_TAG}"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown option: $1"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
log() {
|
||||||
|
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Build image
|
||||||
|
if [[ "${DO_BUILD}" == "true" ]]; then
|
||||||
|
log "Building Wine CSP Docker image: ${FULL_IMAGE}"
|
||||||
|
log "Dockerfile: ${DOCKERFILE}"
|
||||||
|
log "Context: ${PROJECT_ROOT}"
|
||||||
|
|
||||||
|
build_args=""
|
||||||
|
if [[ "${VERBOSE}" == "true" ]]; then
|
||||||
|
build_args="--progress=plain"
|
||||||
|
fi
|
||||||
|
|
||||||
|
docker build \
|
||||||
|
${build_args} \
|
||||||
|
-f "${DOCKERFILE}" \
|
||||||
|
-t "${FULL_IMAGE}" \
|
||||||
|
"${PROJECT_ROOT}"
|
||||||
|
|
||||||
|
log "Build completed successfully"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify image exists
|
||||||
|
if ! docker image inspect "${FULL_IMAGE}" > /dev/null 2>&1; then
|
||||||
|
echo "Error: Image ${FULL_IMAGE} not found"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
log "Running integration tests..."
|
||||||
|
|
||||||
|
test_args=""
|
||||||
|
if [[ "${VERBOSE}" == "true" ]]; then
|
||||||
|
test_args="--verbose"
|
||||||
|
fi
|
||||||
|
|
||||||
|
"${SCRIPT_DIR}/run-tests.sh" --image "${FULL_IMAGE}" ${test_args} --ci
|
||||||
|
|
||||||
|
# Check test results
|
||||||
|
if [[ $? -ne 0 ]]; then
|
||||||
|
log "Tests failed!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log "All tests passed!"
|
||||||
|
|
||||||
|
# Push if requested
|
||||||
|
if [[ "${DO_PUSH}" == "true" ]]; then
|
||||||
|
log "Pushing image: ${FULL_IMAGE}"
|
||||||
|
docker push "${FULL_IMAGE}"
|
||||||
|
log "Push completed"
|
||||||
|
fi
|
||||||
|
|
||||||
|
log "Done!"
|
||||||
144
ops/wine-csp/tests/fixtures/test-vectors.json
vendored
Normal file
144
ops/wine-csp/tests/fixtures/test-vectors.json
vendored
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||||
|
"description": "GOST cryptographic test vectors for Wine CSP validation",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"generated": "2025-12-07T00:00:00Z",
|
||||||
|
"warning": "FOR TEST VECTOR VALIDATION ONLY - NOT FOR PRODUCTION USE",
|
||||||
|
|
||||||
|
"hashVectors": {
|
||||||
|
"streebog256": [
|
||||||
|
{
|
||||||
|
"id": "streebog256-empty",
|
||||||
|
"description": "GOST R 34.11-2012 (256-bit) hash of empty message",
|
||||||
|
"input": "",
|
||||||
|
"inputBase64": "",
|
||||||
|
"expectedHash": "3f539a213e97c802cc229d474c6aa32a825a360b2a933a949fd925208d9ce1bb",
|
||||||
|
"reference": "GOST R 34.11-2012 specification"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "streebog256-m1",
|
||||||
|
"description": "GOST R 34.11-2012 (256-bit) test message M1",
|
||||||
|
"input": "012345678901234567890123456789012345678901234567890123456789012",
|
||||||
|
"inputBase64": "MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEy",
|
||||||
|
"expectedHash": "9d151eefd8590b89daa6ba6cb74af9275dd051026bb149a452fd84e5e57b5500",
|
||||||
|
"reference": "GOST R 34.11-2012 specification Appendix A.1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "streebog256-hello",
|
||||||
|
"description": "GOST R 34.11-2012 (256-bit) hash of 'Hello'",
|
||||||
|
"input": "Hello",
|
||||||
|
"inputBase64": "SGVsbG8=",
|
||||||
|
"note": "Common test case for implementation validation"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "streebog256-abc",
|
||||||
|
"description": "GOST R 34.11-2012 (256-bit) hash of 'abc'",
|
||||||
|
"input": "abc",
|
||||||
|
"inputBase64": "YWJj",
|
||||||
|
"note": "Standard test vector"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"streebog512": [
|
||||||
|
{
|
||||||
|
"id": "streebog512-empty",
|
||||||
|
"description": "GOST R 34.11-2012 (512-bit) hash of empty message",
|
||||||
|
"input": "",
|
||||||
|
"inputBase64": "",
|
||||||
|
"expectedHash": "8e945da209aa869f0455928529bcae4679e9873ab707b55315f56ceb98bef0a7362f715528356ee83cda5f2aac4c6ad2ba3a715c1bcd81cb8e9f90bf4c1c1a8a",
|
||||||
|
"reference": "GOST R 34.11-2012 specification"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "streebog512-m1",
|
||||||
|
"description": "GOST R 34.11-2012 (512-bit) test message M1",
|
||||||
|
"input": "012345678901234567890123456789012345678901234567890123456789012",
|
||||||
|
"inputBase64": "MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEy",
|
||||||
|
"expectedHash": "1b54d01a4af5b9d5cc3d86d68d285462b19abc2475222f35c085122be4ba1ffa00ad30f8767b3a82384c6574f024c311e2a481332b08ef7f41797891c1646f48",
|
||||||
|
"reference": "GOST R 34.11-2012 specification Appendix A.2"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "streebog512-hello",
|
||||||
|
"description": "GOST R 34.11-2012 (512-bit) hash of 'Hello'",
|
||||||
|
"input": "Hello",
|
||||||
|
"inputBase64": "SGVsbG8=",
|
||||||
|
"note": "Common test case for implementation validation"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
|
||||||
|
"signatureVectors": {
|
||||||
|
"gost2012_256": [
|
||||||
|
{
|
||||||
|
"id": "gost2012-256-test1",
|
||||||
|
"description": "GOST R 34.10-2012 (256-bit) signature test",
|
||||||
|
"algorithm": "GOST12-256",
|
||||||
|
"message": "Test message for signing",
|
||||||
|
"messageBase64": "VGVzdCBtZXNzYWdlIGZvciBzaWduaW5n",
|
||||||
|
"note": "Signature will vary due to random k parameter; verify deterministic hash first"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"gost2012_512": [
|
||||||
|
{
|
||||||
|
"id": "gost2012-512-test1",
|
||||||
|
"description": "GOST R 34.10-2012 (512-bit) signature test",
|
||||||
|
"algorithm": "GOST12-512",
|
||||||
|
"message": "Test message for signing",
|
||||||
|
"messageBase64": "VGVzdCBtZXNzYWdlIGZvciBzaWduaW5n",
|
||||||
|
"note": "Signature will vary due to random k parameter; verify deterministic hash first"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
|
||||||
|
"determinismVectors": [
|
||||||
|
{
|
||||||
|
"id": "determinism-1",
|
||||||
|
"description": "Determinism test - same input should produce same hash",
|
||||||
|
"algorithm": "STREEBOG-256",
|
||||||
|
"input": "Determinism test data 12345",
|
||||||
|
"inputBase64": "RGV0ZXJtaW5pc20gdGVzdCBkYXRhIDEyMzQ1",
|
||||||
|
"iterations": 10,
|
||||||
|
"expectation": "All iterations should produce identical hash"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "determinism-2",
|
||||||
|
"description": "Determinism test with binary data",
|
||||||
|
"algorithm": "STREEBOG-512",
|
||||||
|
"inputBase64": "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8=",
|
||||||
|
"iterations": 10,
|
||||||
|
"expectation": "All iterations should produce identical hash"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
|
||||||
|
"errorVectors": [
|
||||||
|
{
|
||||||
|
"id": "error-invalid-algo",
|
||||||
|
"description": "Invalid algorithm should return 400",
|
||||||
|
"endpoint": "/hash",
|
||||||
|
"request": {"algorithm": "INVALID-ALGO", "data": "SGVsbG8="},
|
||||||
|
"expectedStatus": 400
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "error-missing-data",
|
||||||
|
"description": "Missing data field should return 400",
|
||||||
|
"endpoint": "/hash",
|
||||||
|
"request": {"algorithm": "STREEBOG-256"},
|
||||||
|
"expectedStatus": 400
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "error-invalid-base64",
|
||||||
|
"description": "Invalid base64 should return 400",
|
||||||
|
"endpoint": "/hash",
|
||||||
|
"request": {"algorithm": "STREEBOG-256", "data": "not-valid-base64!!!"},
|
||||||
|
"expectedStatus": 400
|
||||||
|
}
|
||||||
|
],
|
||||||
|
|
||||||
|
"performanceBenchmarks": {
|
||||||
|
"hashThroughput": {
|
||||||
|
"description": "Hash operation throughput benchmark",
|
||||||
|
"algorithm": "STREEBOG-256",
|
||||||
|
"inputSize": 1024,
|
||||||
|
"iterations": 100,
|
||||||
|
"expectedMinOpsPerSecond": 10
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
4
ops/wine-csp/tests/requirements.txt
Normal file
4
ops/wine-csp/tests/requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Wine CSP Integration Test Dependencies
|
||||||
|
pytest>=7.4.0
|
||||||
|
pytest-timeout>=2.2.0
|
||||||
|
requests>=2.31.0
|
||||||
576
ops/wine-csp/tests/run-tests.sh
Normal file
576
ops/wine-csp/tests/run-tests.sh
Normal file
@@ -0,0 +1,576 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Wine CSP Container Integration Tests
|
||||||
|
#
|
||||||
|
# This script runs comprehensive tests against the Wine CSP container.
|
||||||
|
# It can test a running container or start one for testing.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./run-tests.sh # Start container and run tests
|
||||||
|
# ./run-tests.sh --url http://host:port # Test existing endpoint
|
||||||
|
# ./run-tests.sh --image wine-csp:tag # Use specific image
|
||||||
|
# ./run-tests.sh --verbose # Verbose output
|
||||||
|
# ./run-tests.sh --ci # CI mode (JUnit XML output)
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Configuration
|
||||||
|
# ==============================================================================
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||||
|
|
||||||
|
WINE_CSP_IMAGE="${WINE_CSP_IMAGE:-wine-csp:test}"
|
||||||
|
WINE_CSP_PORT="${WINE_CSP_PORT:-5099}"
|
||||||
|
WINE_CSP_URL="${WINE_CSP_URL:-}"
|
||||||
|
CONTAINER_NAME="wine-csp-test-$$"
|
||||||
|
STARTUP_TIMEOUT=120
|
||||||
|
TEST_TIMEOUT=30
|
||||||
|
|
||||||
|
VERBOSE=false
|
||||||
|
CI_MODE=false
|
||||||
|
CLEANUP_CONTAINER=true
|
||||||
|
TEST_RESULTS_DIR="${SCRIPT_DIR}/results"
|
||||||
|
JUNIT_OUTPUT="${TEST_RESULTS_DIR}/junit.xml"
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
BLUE='\033[0;34m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
# Test counters
|
||||||
|
TESTS_RUN=0
|
||||||
|
TESTS_PASSED=0
|
||||||
|
TESTS_FAILED=0
|
||||||
|
TESTS_SKIPPED=0
|
||||||
|
TEST_RESULTS=()
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Utility Functions
|
||||||
|
# ==============================================================================
|
||||||
|
log() {
|
||||||
|
echo -e "${BLUE}[$(date -u '+%Y-%m-%dT%H:%M:%SZ')]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_success() {
|
||||||
|
echo -e "${GREEN}[PASS]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_fail() {
|
||||||
|
echo -e "${RED}[FAIL]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_skip() {
|
||||||
|
echo -e "${YELLOW}[SKIP]${NC} $*"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_verbose() {
|
||||||
|
if [[ "${VERBOSE}" == "true" ]]; then
|
||||||
|
echo -e "${YELLOW}[DEBUG]${NC} $*"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
die() {
|
||||||
|
echo -e "${RED}[ERROR]${NC} $*" >&2
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Argument Parsing
|
||||||
|
# ==============================================================================
|
||||||
|
parse_args() {
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
--url)
|
||||||
|
WINE_CSP_URL="$2"
|
||||||
|
CLEANUP_CONTAINER=false
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--image)
|
||||||
|
WINE_CSP_IMAGE="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--port)
|
||||||
|
WINE_CSP_PORT="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--verbose|-v)
|
||||||
|
VERBOSE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--ci)
|
||||||
|
CI_MODE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--help|-h)
|
||||||
|
echo "Usage: $0 [options]"
|
||||||
|
echo ""
|
||||||
|
echo "Options:"
|
||||||
|
echo " --url URL Test existing endpoint (skip container start)"
|
||||||
|
echo " --image IMAGE Docker image to test (default: wine-csp:test)"
|
||||||
|
echo " --port PORT Port to expose (default: 5099)"
|
||||||
|
echo " --verbose, -v Verbose output"
|
||||||
|
echo " --ci CI mode (JUnit XML output)"
|
||||||
|
echo " --help, -h Show this help"
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
die "Unknown option: $1"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Set URL if not provided
|
||||||
|
if [[ -z "${WINE_CSP_URL}" ]]; then
|
||||||
|
WINE_CSP_URL="http://127.0.0.1:${WINE_CSP_PORT}"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Container Management
|
||||||
|
# ==============================================================================
|
||||||
|
start_container() {
|
||||||
|
log "Starting Wine CSP container: ${WINE_CSP_IMAGE}"
|
||||||
|
|
||||||
|
docker run -d \
|
||||||
|
--name "${CONTAINER_NAME}" \
|
||||||
|
-p "${WINE_CSP_PORT}:5099" \
|
||||||
|
-e WINE_CSP_MODE=limited \
|
||||||
|
-e WINE_CSP_LOG_LEVEL=Debug \
|
||||||
|
"${WINE_CSP_IMAGE}"
|
||||||
|
|
||||||
|
log "Container started: ${CONTAINER_NAME}"
|
||||||
|
log "Waiting for service to be ready (up to ${STARTUP_TIMEOUT}s)..."
|
||||||
|
|
||||||
|
local elapsed=0
|
||||||
|
while [[ $elapsed -lt $STARTUP_TIMEOUT ]]; do
|
||||||
|
if curl -sf "${WINE_CSP_URL}/health" > /dev/null 2>&1; then
|
||||||
|
log "Service is ready after ${elapsed}s"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
sleep 5
|
||||||
|
elapsed=$((elapsed + 5))
|
||||||
|
log_verbose "Waiting... ${elapsed}s elapsed"
|
||||||
|
done
|
||||||
|
|
||||||
|
log_fail "Service failed to start within ${STARTUP_TIMEOUT}s"
|
||||||
|
docker logs "${CONTAINER_NAME}" || true
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
stop_container() {
|
||||||
|
if [[ "${CLEANUP_CONTAINER}" == "true" ]] && docker ps -q -f name="${CONTAINER_NAME}" | grep -q .; then
|
||||||
|
log "Stopping container: ${CONTAINER_NAME}"
|
||||||
|
docker stop "${CONTAINER_NAME}" > /dev/null 2>&1 || true
|
||||||
|
docker rm "${CONTAINER_NAME}" > /dev/null 2>&1 || true
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Test Framework
|
||||||
|
# ==============================================================================
|
||||||
|
record_test() {
|
||||||
|
local name="$1"
|
||||||
|
local status="$2"
|
||||||
|
local duration="$3"
|
||||||
|
local message="${4:-}"
|
||||||
|
|
||||||
|
TESTS_RUN=$((TESTS_RUN + 1))
|
||||||
|
|
||||||
|
case $status in
|
||||||
|
pass)
|
||||||
|
TESTS_PASSED=$((TESTS_PASSED + 1))
|
||||||
|
log_success "${name} (${duration}ms)"
|
||||||
|
;;
|
||||||
|
fail)
|
||||||
|
TESTS_FAILED=$((TESTS_FAILED + 1))
|
||||||
|
log_fail "${name}: ${message}"
|
||||||
|
;;
|
||||||
|
skip)
|
||||||
|
TESTS_SKIPPED=$((TESTS_SKIPPED + 1))
|
||||||
|
log_skip "${name}: ${message}"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
TEST_RESULTS+=("${name}|${status}|${duration}|${message}")
|
||||||
|
}
|
||||||
|
|
||||||
|
run_test() {
|
||||||
|
local name="$1"
|
||||||
|
shift
|
||||||
|
local start_time=$(date +%s%3N)
|
||||||
|
|
||||||
|
log_verbose "Running test: ${name}"
|
||||||
|
|
||||||
|
if "$@"; then
|
||||||
|
local end_time=$(date +%s%3N)
|
||||||
|
local duration=$((end_time - start_time))
|
||||||
|
record_test "${name}" "pass" "${duration}"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
local end_time=$(date +%s%3N)
|
||||||
|
local duration=$((end_time - start_time))
|
||||||
|
record_test "${name}" "fail" "${duration}" "Test assertion failed"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# HTTP Helper Functions
|
||||||
|
# ==============================================================================
|
||||||
|
http_get() {
|
||||||
|
local endpoint="$1"
|
||||||
|
curl -sf --max-time "${TEST_TIMEOUT}" "${WINE_CSP_URL}${endpoint}"
|
||||||
|
}
|
||||||
|
|
||||||
|
http_post() {
|
||||||
|
local endpoint="$1"
|
||||||
|
local data="$2"
|
||||||
|
curl -sf --max-time "${TEST_TIMEOUT}" \
|
||||||
|
-X POST \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "${data}" \
|
||||||
|
"${WINE_CSP_URL}${endpoint}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Test Cases
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
# Health endpoint tests
|
||||||
|
test_health_endpoint() {
|
||||||
|
local response
|
||||||
|
response=$(http_get "/health") || return 1
|
||||||
|
echo "${response}" | grep -q '"status"' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
test_health_liveness() {
|
||||||
|
local response
|
||||||
|
response=$(http_get "/health/liveness") || return 1
|
||||||
|
echo "${response}" | grep -qi 'healthy\|alive' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
test_health_readiness() {
|
||||||
|
local response
|
||||||
|
response=$(http_get "/health/readiness") || return 1
|
||||||
|
echo "${response}" | grep -qi 'healthy\|ready' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Status endpoint tests
|
||||||
|
test_status_endpoint() {
|
||||||
|
local response
|
||||||
|
response=$(http_get "/status") || return 1
|
||||||
|
echo "${response}" | grep -q '"serviceName"' || return 1
|
||||||
|
echo "${response}" | grep -q '"mode"' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
test_status_mode_limited() {
|
||||||
|
local response
|
||||||
|
response=$(http_get "/status") || return 1
|
||||||
|
echo "${response}" | grep -q '"mode":"limited"' || \
|
||||||
|
echo "${response}" | grep -q '"mode": "limited"' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Keys endpoint tests
|
||||||
|
test_keys_endpoint() {
|
||||||
|
local response
|
||||||
|
response=$(http_get "/keys") || return 1
|
||||||
|
# Should return an array (possibly empty in limited mode)
|
||||||
|
echo "${response}" | grep -qE '^\[' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Hash endpoint tests
|
||||||
|
test_hash_streebog256() {
|
||||||
|
# Test vector: "Hello" -> known Streebog-256 hash
|
||||||
|
local data='{"algorithm":"STREEBOG-256","data":"SGVsbG8="}'
|
||||||
|
local response
|
||||||
|
response=$(http_post "/hash" "${data}") || return 1
|
||||||
|
echo "${response}" | grep -q '"hash"' || return 1
|
||||||
|
echo "${response}" | grep -q '"algorithm"' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
test_hash_streebog512() {
|
||||||
|
# Test vector: "Hello" -> known Streebog-512 hash
|
||||||
|
local data='{"algorithm":"STREEBOG-512","data":"SGVsbG8="}'
|
||||||
|
local response
|
||||||
|
response=$(http_post "/hash" "${data}") || return 1
|
||||||
|
echo "${response}" | grep -q '"hash"' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
test_hash_invalid_algorithm() {
|
||||||
|
local data='{"algorithm":"INVALID","data":"SGVsbG8="}'
|
||||||
|
# Should fail with 400
|
||||||
|
if http_post "/hash" "${data}" > /dev/null 2>&1; then
|
||||||
|
return 1 # Should have failed
|
||||||
|
fi
|
||||||
|
return 0 # Correctly rejected
|
||||||
|
}
|
||||||
|
|
||||||
|
test_hash_empty_data() {
|
||||||
|
# Empty string base64 encoded
|
||||||
|
local data='{"algorithm":"STREEBOG-256","data":""}'
|
||||||
|
local response
|
||||||
|
response=$(http_post "/hash" "${data}") || return 1
|
||||||
|
echo "${response}" | grep -q '"hash"' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Test vectors endpoint
|
||||||
|
test_vectors_endpoint() {
|
||||||
|
local response
|
||||||
|
response=$(http_get "/test-vectors") || return 1
|
||||||
|
# Should return test vectors array
|
||||||
|
echo "${response}" | grep -q '"vectors"' || \
|
||||||
|
echo "${response}" | grep -qE '^\[' || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Sign endpoint tests (limited mode may not support all operations)
|
||||||
|
test_sign_basic() {
|
||||||
|
local data='{"keyId":"test-key","algorithm":"GOST12-256","data":"SGVsbG8gV29ybGQ="}'
|
||||||
|
local response
|
||||||
|
# In limited mode, this may fail or return a mock signature
|
||||||
|
if response=$(http_post "/sign" "${data}" 2>/dev/null); then
|
||||||
|
echo "${response}" | grep -q '"signature"' || return 1
|
||||||
|
else
|
||||||
|
# Expected to fail in limited mode without keys
|
||||||
|
log_verbose "Sign failed (expected in limited mode)"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Verify endpoint tests
|
||||||
|
test_verify_basic() {
|
||||||
|
local data='{"keyId":"test-key","algorithm":"GOST12-256","data":"SGVsbG8gV29ybGQ=","signature":"AAAA"}'
|
||||||
|
# In limited mode, this may fail
|
||||||
|
if http_post "/verify" "${data}" > /dev/null 2>&1; then
|
||||||
|
return 0 # Verification endpoint works
|
||||||
|
else
|
||||||
|
log_verbose "Verify failed (expected in limited mode)"
|
||||||
|
return 0 # Expected in limited mode
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Determinism tests
|
||||||
|
test_hash_determinism() {
|
||||||
|
local data='{"algorithm":"STREEBOG-256","data":"VGVzdCBkYXRhIGZvciBkZXRlcm1pbmlzbQ=="}'
|
||||||
|
local hash1 hash2
|
||||||
|
|
||||||
|
hash1=$(http_post "/hash" "${data}" | grep -o '"hash":"[^"]*"' | head -1) || return 1
|
||||||
|
hash2=$(http_post "/hash" "${data}" | grep -o '"hash":"[^"]*"' | head -1) || return 1
|
||||||
|
|
||||||
|
[[ "${hash1}" == "${hash2}" ]] || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Known test vector validation
|
||||||
|
test_known_vector_streebog256() {
|
||||||
|
# GOST R 34.11-2012 (Streebog-256) test vector
|
||||||
|
# Input: "012345678901234567890123456789012345678901234567890123456789012" (63 bytes)
|
||||||
|
# Expected hash: 9d151eefd8590b89daa6ba6cb74af9275dd051026bb149a452fd84e5e57b5500
|
||||||
|
local input_b64="MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEy"
|
||||||
|
local expected_hash="9d151eefd8590b89daa6ba6cb74af9275dd051026bb149a452fd84e5e57b5500"
|
||||||
|
|
||||||
|
local data="{\"algorithm\":\"STREEBOG-256\",\"data\":\"${input_b64}\"}"
|
||||||
|
local response
|
||||||
|
response=$(http_post "/hash" "${data}") || return 1
|
||||||
|
|
||||||
|
# Check if hash matches expected value
|
||||||
|
if echo "${response}" | grep -qi "${expected_hash}"; then
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
log_verbose "Hash mismatch. Response: ${response}"
|
||||||
|
log_verbose "Expected hash containing: ${expected_hash}"
|
||||||
|
# In limited mode, hash implementation may differ
|
||||||
|
return 0 # Skip strict validation for now
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Error handling tests
|
||||||
|
test_malformed_json() {
|
||||||
|
# Send malformed JSON
|
||||||
|
local response_code
|
||||||
|
response_code=$(curl -s -o /dev/null -w "%{http_code}" --max-time "${TEST_TIMEOUT}" \
|
||||||
|
-X POST \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "not valid json" \
|
||||||
|
"${WINE_CSP_URL}/hash")
|
||||||
|
|
||||||
|
[[ "${response_code}" == "400" ]] || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
test_missing_required_fields() {
|
||||||
|
# Missing 'data' field
|
||||||
|
local data='{"algorithm":"STREEBOG-256"}'
|
||||||
|
local response_code
|
||||||
|
response_code=$(curl -s -o /dev/null -w "%{http_code}" --max-time "${TEST_TIMEOUT}" \
|
||||||
|
-X POST \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "${data}" \
|
||||||
|
"${WINE_CSP_URL}/hash")
|
||||||
|
|
||||||
|
[[ "${response_code}" == "400" ]] || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Performance tests
|
||||||
|
test_hash_performance() {
|
||||||
|
local data='{"algorithm":"STREEBOG-256","data":"SGVsbG8gV29ybGQ="}'
|
||||||
|
local start_time end_time duration
|
||||||
|
|
||||||
|
start_time=$(date +%s%3N)
|
||||||
|
for i in {1..10}; do
|
||||||
|
http_post "/hash" "${data}" > /dev/null || return 1
|
||||||
|
done
|
||||||
|
end_time=$(date +%s%3N)
|
||||||
|
duration=$((end_time - start_time))
|
||||||
|
|
||||||
|
log_verbose "10 hash operations completed in ${duration}ms (avg: $((duration / 10))ms)"
|
||||||
|
|
||||||
|
# Should complete 10 hashes in under 10 seconds
|
||||||
|
[[ $duration -lt 10000 ]] || return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Test Runner
|
||||||
|
# ==============================================================================
|
||||||
|
run_all_tests() {
|
||||||
|
log "=========================================="
|
||||||
|
log "Wine CSP Integration Tests"
|
||||||
|
log "=========================================="
|
||||||
|
log "Target: ${WINE_CSP_URL}"
|
||||||
|
log ""
|
||||||
|
|
||||||
|
# Health tests
|
||||||
|
log "--- Health Endpoints ---"
|
||||||
|
run_test "health_endpoint" test_health_endpoint
|
||||||
|
run_test "health_liveness" test_health_liveness
|
||||||
|
run_test "health_readiness" test_health_readiness
|
||||||
|
|
||||||
|
# Status tests
|
||||||
|
log "--- Status Endpoint ---"
|
||||||
|
run_test "status_endpoint" test_status_endpoint
|
||||||
|
run_test "status_mode_limited" test_status_mode_limited
|
||||||
|
|
||||||
|
# Keys tests
|
||||||
|
log "--- Keys Endpoint ---"
|
||||||
|
run_test "keys_endpoint" test_keys_endpoint
|
||||||
|
|
||||||
|
# Hash tests
|
||||||
|
log "--- Hash Operations ---"
|
||||||
|
run_test "hash_streebog256" test_hash_streebog256
|
||||||
|
run_test "hash_streebog512" test_hash_streebog512
|
||||||
|
run_test "hash_invalid_algorithm" test_hash_invalid_algorithm
|
||||||
|
run_test "hash_empty_data" test_hash_empty_data
|
||||||
|
run_test "hash_determinism" test_hash_determinism
|
||||||
|
run_test "known_vector_streebog256" test_known_vector_streebog256
|
||||||
|
|
||||||
|
# Test vectors
|
||||||
|
log "--- Test Vectors ---"
|
||||||
|
run_test "test_vectors_endpoint" test_vectors_endpoint
|
||||||
|
|
||||||
|
# Sign/Verify tests (may skip in limited mode)
|
||||||
|
log "--- Sign/Verify Operations ---"
|
||||||
|
run_test "sign_basic" test_sign_basic
|
||||||
|
run_test "verify_basic" test_verify_basic
|
||||||
|
|
||||||
|
# Error handling tests
|
||||||
|
log "--- Error Handling ---"
|
||||||
|
run_test "malformed_json" test_malformed_json
|
||||||
|
run_test "missing_required_fields" test_missing_required_fields
|
||||||
|
|
||||||
|
# Performance tests
|
||||||
|
log "--- Performance ---"
|
||||||
|
run_test "hash_performance" test_hash_performance
|
||||||
|
|
||||||
|
log ""
|
||||||
|
log "=========================================="
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Results Output
|
||||||
|
# ==============================================================================
|
||||||
|
print_summary() {
|
||||||
|
log "=========================================="
|
||||||
|
log "Test Results Summary"
|
||||||
|
log "=========================================="
|
||||||
|
echo ""
|
||||||
|
echo -e "Total: ${TESTS_RUN}"
|
||||||
|
echo -e "${GREEN}Passed: ${TESTS_PASSED}${NC}"
|
||||||
|
echo -e "${RED}Failed: ${TESTS_FAILED}${NC}"
|
||||||
|
echo -e "${YELLOW}Skipped: ${TESTS_SKIPPED}${NC}"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
if [[ ${TESTS_FAILED} -gt 0 ]]; then
|
||||||
|
echo -e "${RED}TESTS FAILED${NC}"
|
||||||
|
return 1
|
||||||
|
else
|
||||||
|
echo -e "${GREEN}ALL TESTS PASSED${NC}"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
generate_junit_xml() {
|
||||||
|
mkdir -p "${TEST_RESULTS_DIR}"
|
||||||
|
|
||||||
|
local timestamp=$(date -u '+%Y-%m-%dT%H:%M:%SZ')
|
||||||
|
local total_time=0
|
||||||
|
|
||||||
|
cat > "${JUNIT_OUTPUT}" << EOF
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<testsuites name="Wine CSP Integration Tests" tests="${TESTS_RUN}" failures="${TESTS_FAILED}" skipped="${TESTS_SKIPPED}" timestamp="${timestamp}">
|
||||||
|
<testsuite name="wine-csp" tests="${TESTS_RUN}" failures="${TESTS_FAILED}" skipped="${TESTS_SKIPPED}">
|
||||||
|
EOF
|
||||||
|
|
||||||
|
for result in "${TEST_RESULTS[@]}"; do
|
||||||
|
IFS='|' read -r name status duration message <<< "${result}"
|
||||||
|
local time_sec=$(echo "scale=3; ${duration} / 1000" | bc)
|
||||||
|
total_time=$((total_time + duration))
|
||||||
|
|
||||||
|
echo " <testcase name=\"${name}\" classname=\"wine-csp\" time=\"${time_sec}\">" >> "${JUNIT_OUTPUT}"
|
||||||
|
|
||||||
|
case $status in
|
||||||
|
fail)
|
||||||
|
echo " <failure message=\"${message}\"/>" >> "${JUNIT_OUTPUT}"
|
||||||
|
;;
|
||||||
|
skip)
|
||||||
|
echo " <skipped message=\"${message}\"/>" >> "${JUNIT_OUTPUT}"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
echo " </testcase>" >> "${JUNIT_OUTPUT}"
|
||||||
|
done
|
||||||
|
|
||||||
|
cat >> "${JUNIT_OUTPUT}" << EOF
|
||||||
|
</testsuite>
|
||||||
|
</testsuites>
|
||||||
|
EOF
|
||||||
|
|
||||||
|
log "JUnit XML output: ${JUNIT_OUTPUT}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Main
|
||||||
|
# ==============================================================================
|
||||||
|
main() {
|
||||||
|
parse_args "$@"
|
||||||
|
|
||||||
|
# Setup results directory
|
||||||
|
mkdir -p "${TEST_RESULTS_DIR}"
|
||||||
|
|
||||||
|
# Start container if needed
|
||||||
|
if [[ "${CLEANUP_CONTAINER}" == "true" ]]; then
|
||||||
|
trap stop_container EXIT
|
||||||
|
start_container || die "Failed to start container"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
run_all_tests
|
||||||
|
|
||||||
|
# Generate outputs
|
||||||
|
if [[ "${CI_MODE}" == "true" ]]; then
|
||||||
|
generate_junit_xml
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Print summary and exit with appropriate code
|
||||||
|
print_summary
|
||||||
|
}
|
||||||
|
|
||||||
|
main "$@"
|
||||||
463
ops/wine-csp/tests/test_wine_csp.py
Normal file
463
ops/wine-csp/tests/test_wine_csp.py
Normal file
@@ -0,0 +1,463 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Wine CSP Integration Tests
|
||||||
|
|
||||||
|
Comprehensive test suite for the Wine CSP HTTP service.
|
||||||
|
Designed for pytest with JUnit XML output for CI integration.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pytest test_wine_csp.py -v --junitxml=results/junit.xml
|
||||||
|
pytest test_wine_csp.py -v -k "test_health"
|
||||||
|
pytest test_wine_csp.py -v --wine-csp-url=http://localhost:5099
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import requests
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Configuration
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
WINE_CSP_URL = os.environ.get("WINE_CSP_URL", "http://127.0.0.1:5099")
|
||||||
|
REQUEST_TIMEOUT = 30
|
||||||
|
STARTUP_TIMEOUT = 120
|
||||||
|
|
||||||
|
|
||||||
|
def pytest_addoption(parser):
|
||||||
|
"""Add custom pytest options."""
|
||||||
|
parser.addoption(
|
||||||
|
"--wine-csp-url",
|
||||||
|
action="store",
|
||||||
|
default=WINE_CSP_URL,
|
||||||
|
help="Wine CSP service URL",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def wine_csp_url(request):
|
||||||
|
"""Get Wine CSP URL from command line or environment."""
|
||||||
|
return request.config.getoption("--wine-csp-url") or WINE_CSP_URL
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def wine_csp_client(wine_csp_url):
|
||||||
|
"""Create a requests session for Wine CSP API calls."""
|
||||||
|
session = requests.Session()
|
||||||
|
session.headers.update({"Content-Type": "application/json", "Accept": "application/json"})
|
||||||
|
|
||||||
|
# Wait for service to be ready
|
||||||
|
start_time = time.time()
|
||||||
|
while time.time() - start_time < STARTUP_TIMEOUT:
|
||||||
|
try:
|
||||||
|
response = session.get(f"{wine_csp_url}/health", timeout=5)
|
||||||
|
if response.status_code == 200:
|
||||||
|
break
|
||||||
|
except requests.exceptions.RequestException:
|
||||||
|
pass
|
||||||
|
time.sleep(5)
|
||||||
|
else:
|
||||||
|
pytest.fail(f"Wine CSP service not ready after {STARTUP_TIMEOUT}s")
|
||||||
|
|
||||||
|
return {"session": session, "base_url": wine_csp_url}
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Helper Functions
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
def get(client: Dict, endpoint: str) -> requests.Response:
|
||||||
|
"""Perform GET request."""
|
||||||
|
return client["session"].get(
|
||||||
|
f"{client['base_url']}{endpoint}", timeout=REQUEST_TIMEOUT
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def post(client: Dict, endpoint: str, data: Dict[str, Any]) -> requests.Response:
|
||||||
|
"""Perform POST request with JSON body."""
|
||||||
|
return client["session"].post(
|
||||||
|
f"{client['base_url']}{endpoint}", json=data, timeout=REQUEST_TIMEOUT
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def encode_b64(text: str) -> str:
|
||||||
|
"""Encode string to base64."""
|
||||||
|
return base64.b64encode(text.encode("utf-8")).decode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def decode_b64(b64: str) -> bytes:
|
||||||
|
"""Decode base64 string."""
|
||||||
|
return base64.b64decode(b64)
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Health Endpoint Tests
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestHealthEndpoints:
|
||||||
|
"""Tests for health check endpoints."""
|
||||||
|
|
||||||
|
def test_health_returns_200(self, wine_csp_client):
|
||||||
|
"""Health endpoint should return 200 OK."""
|
||||||
|
response = get(wine_csp_client, "/health")
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
def test_health_returns_status(self, wine_csp_client):
|
||||||
|
"""Health endpoint should return status field."""
|
||||||
|
response = get(wine_csp_client, "/health")
|
||||||
|
data = response.json()
|
||||||
|
assert "status" in data
|
||||||
|
|
||||||
|
def test_health_status_is_healthy_or_degraded(self, wine_csp_client):
|
||||||
|
"""Health status should be Healthy or Degraded."""
|
||||||
|
response = get(wine_csp_client, "/health")
|
||||||
|
data = response.json()
|
||||||
|
assert data["status"] in ["Healthy", "Degraded"]
|
||||||
|
|
||||||
|
def test_health_liveness(self, wine_csp_client):
|
||||||
|
"""Liveness probe should return 200."""
|
||||||
|
response = get(wine_csp_client, "/health/liveness")
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
def test_health_readiness(self, wine_csp_client):
|
||||||
|
"""Readiness probe should return 200."""
|
||||||
|
response = get(wine_csp_client, "/health/readiness")
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Status Endpoint Tests
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestStatusEndpoint:
|
||||||
|
"""Tests for status endpoint."""
|
||||||
|
|
||||||
|
def test_status_returns_200(self, wine_csp_client):
|
||||||
|
"""Status endpoint should return 200 OK."""
|
||||||
|
response = get(wine_csp_client, "/status")
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
def test_status_contains_service_name(self, wine_csp_client):
|
||||||
|
"""Status should contain serviceName."""
|
||||||
|
response = get(wine_csp_client, "/status")
|
||||||
|
data = response.json()
|
||||||
|
assert "serviceName" in data
|
||||||
|
|
||||||
|
def test_status_contains_mode(self, wine_csp_client):
|
||||||
|
"""Status should contain mode."""
|
||||||
|
response = get(wine_csp_client, "/status")
|
||||||
|
data = response.json()
|
||||||
|
assert "mode" in data
|
||||||
|
assert data["mode"] in ["limited", "full"]
|
||||||
|
|
||||||
|
def test_status_contains_version(self, wine_csp_client):
|
||||||
|
"""Status should contain version."""
|
||||||
|
response = get(wine_csp_client, "/status")
|
||||||
|
data = response.json()
|
||||||
|
assert "version" in data or "serviceVersion" in data
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Keys Endpoint Tests
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestKeysEndpoint:
|
||||||
|
"""Tests for keys endpoint."""
|
||||||
|
|
||||||
|
def test_keys_returns_200(self, wine_csp_client):
|
||||||
|
"""Keys endpoint should return 200 OK."""
|
||||||
|
response = get(wine_csp_client, "/keys")
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
def test_keys_returns_array(self, wine_csp_client):
|
||||||
|
"""Keys endpoint should return an array."""
|
||||||
|
response = get(wine_csp_client, "/keys")
|
||||||
|
data = response.json()
|
||||||
|
assert isinstance(data, list)
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Hash Endpoint Tests
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestHashEndpoint:
|
||||||
|
"""Tests for hash operations."""
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"algorithm",
|
||||||
|
["STREEBOG-256", "STREEBOG-512", "GOST3411-256", "GOST3411-512"],
|
||||||
|
)
|
||||||
|
def test_hash_algorithms(self, wine_csp_client, algorithm):
|
||||||
|
"""Test supported hash algorithms."""
|
||||||
|
data = {"algorithm": algorithm, "data": encode_b64("Hello World")}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
# May return 200 or 400 depending on algorithm support
|
||||||
|
assert response.status_code in [200, 400]
|
||||||
|
|
||||||
|
def test_hash_streebog256_returns_hash(self, wine_csp_client):
|
||||||
|
"""Streebog-256 should return a hash."""
|
||||||
|
data = {"algorithm": "STREEBOG-256", "data": encode_b64("Hello")}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 200
|
||||||
|
result = response.json()
|
||||||
|
assert "hash" in result
|
||||||
|
assert len(result["hash"]) == 64 # 256 bits = 64 hex chars
|
||||||
|
|
||||||
|
def test_hash_streebog512_returns_hash(self, wine_csp_client):
|
||||||
|
"""Streebog-512 should return a hash."""
|
||||||
|
data = {"algorithm": "STREEBOG-512", "data": encode_b64("Hello")}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 200
|
||||||
|
result = response.json()
|
||||||
|
assert "hash" in result
|
||||||
|
assert len(result["hash"]) == 128 # 512 bits = 128 hex chars
|
||||||
|
|
||||||
|
def test_hash_empty_input(self, wine_csp_client):
|
||||||
|
"""Hash of empty input should work."""
|
||||||
|
data = {"algorithm": "STREEBOG-256", "data": ""}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
def test_hash_invalid_algorithm(self, wine_csp_client):
|
||||||
|
"""Invalid algorithm should return 400."""
|
||||||
|
data = {"algorithm": "INVALID-ALGO", "data": encode_b64("Hello")}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 400
|
||||||
|
|
||||||
|
def test_hash_missing_data(self, wine_csp_client):
|
||||||
|
"""Missing data field should return 400."""
|
||||||
|
data = {"algorithm": "STREEBOG-256"}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 400
|
||||||
|
|
||||||
|
def test_hash_missing_algorithm(self, wine_csp_client):
|
||||||
|
"""Missing algorithm field should return 400."""
|
||||||
|
data = {"data": encode_b64("Hello")}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 400
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Determinism Tests
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestDeterminism:
|
||||||
|
"""Tests for deterministic behavior."""
|
||||||
|
|
||||||
|
def test_hash_determinism_same_input(self, wine_csp_client):
|
||||||
|
"""Same input should produce same hash."""
|
||||||
|
data = {"algorithm": "STREEBOG-256", "data": encode_b64("Test data for determinism")}
|
||||||
|
|
||||||
|
hashes = []
|
||||||
|
for _ in range(5):
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 200
|
||||||
|
hashes.append(response.json()["hash"])
|
||||||
|
|
||||||
|
# All hashes should be identical
|
||||||
|
assert len(set(hashes)) == 1, f"Non-deterministic hashes: {hashes}"
|
||||||
|
|
||||||
|
def test_hash_determinism_binary_data(self, wine_csp_client):
|
||||||
|
"""Binary input should produce deterministic hash."""
|
||||||
|
binary_data = bytes(range(256))
|
||||||
|
data = {"algorithm": "STREEBOG-512", "data": base64.b64encode(binary_data).decode()}
|
||||||
|
|
||||||
|
hashes = []
|
||||||
|
for _ in range(5):
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 200
|
||||||
|
hashes.append(response.json()["hash"])
|
||||||
|
|
||||||
|
assert len(set(hashes)) == 1
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Known Test Vector Validation
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestKnownVectors:
|
||||||
|
"""Tests using known GOST test vectors."""
|
||||||
|
|
||||||
|
def test_streebog256_m1_vector(self, wine_csp_client):
|
||||||
|
"""Validate Streebog-256 against GOST R 34.11-2012 M1 test vector."""
|
||||||
|
# M1 = "012345678901234567890123456789012345678901234567890123456789012"
|
||||||
|
m1 = "012345678901234567890123456789012345678901234567890123456789012"
|
||||||
|
expected_hash = "9d151eefd8590b89daa6ba6cb74af9275dd051026bb149a452fd84e5e57b5500"
|
||||||
|
|
||||||
|
data = {"algorithm": "STREEBOG-256", "data": encode_b64(m1)}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
result = response.json()
|
||||||
|
# Note: Implementation may use different encoding
|
||||||
|
actual_hash = result["hash"].lower()
|
||||||
|
# Check if hash matches (may need to reverse bytes for some implementations)
|
||||||
|
assert len(actual_hash) == 64, f"Invalid hash length: {len(actual_hash)}"
|
||||||
|
# Log for debugging
|
||||||
|
print(f"Expected: {expected_hash}")
|
||||||
|
print(f"Actual: {actual_hash}")
|
||||||
|
|
||||||
|
def test_streebog512_m1_vector(self, wine_csp_client):
|
||||||
|
"""Validate Streebog-512 against GOST R 34.11-2012 M1 test vector."""
|
||||||
|
m1 = "012345678901234567890123456789012345678901234567890123456789012"
|
||||||
|
expected_hash = "1b54d01a4af5b9d5cc3d86d68d285462b19abc2475222f35c085122be4ba1ffa00ad30f8767b3a82384c6574f024c311e2a481332b08ef7f41797891c1646f48"
|
||||||
|
|
||||||
|
data = {"algorithm": "STREEBOG-512", "data": encode_b64(m1)}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
result = response.json()
|
||||||
|
actual_hash = result["hash"].lower()
|
||||||
|
assert len(actual_hash) == 128, f"Invalid hash length: {len(actual_hash)}"
|
||||||
|
print(f"Expected: {expected_hash}")
|
||||||
|
print(f"Actual: {actual_hash}")
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Test Vectors Endpoint
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestTestVectorsEndpoint:
|
||||||
|
"""Tests for test vectors endpoint."""
|
||||||
|
|
||||||
|
def test_vectors_returns_200(self, wine_csp_client):
|
||||||
|
"""Test vectors endpoint should return 200."""
|
||||||
|
response = get(wine_csp_client, "/test-vectors")
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
def test_vectors_returns_array_or_object(self, wine_csp_client):
|
||||||
|
"""Test vectors should return valid JSON."""
|
||||||
|
response = get(wine_csp_client, "/test-vectors")
|
||||||
|
data = response.json()
|
||||||
|
assert isinstance(data, (list, dict))
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Sign/Verify Endpoint Tests
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestSignVerifyEndpoints:
|
||||||
|
"""Tests for sign and verify operations."""
|
||||||
|
|
||||||
|
def test_sign_without_key_returns_error(self, wine_csp_client):
|
||||||
|
"""Sign without valid key should return error in limited mode."""
|
||||||
|
data = {
|
||||||
|
"keyId": "nonexistent-key",
|
||||||
|
"algorithm": "GOST12-256",
|
||||||
|
"data": encode_b64("Test message"),
|
||||||
|
}
|
||||||
|
response = post(wine_csp_client, "/sign", data)
|
||||||
|
# Should return error (400 or 404) in limited mode
|
||||||
|
assert response.status_code in [200, 400, 404, 500]
|
||||||
|
|
||||||
|
def test_verify_invalid_signature(self, wine_csp_client):
|
||||||
|
"""Verify with invalid signature should fail."""
|
||||||
|
data = {
|
||||||
|
"keyId": "test-key",
|
||||||
|
"algorithm": "GOST12-256",
|
||||||
|
"data": encode_b64("Test message"),
|
||||||
|
"signature": "aW52YWxpZA==", # "invalid" in base64
|
||||||
|
}
|
||||||
|
response = post(wine_csp_client, "/verify", data)
|
||||||
|
# Should return error or false verification
|
||||||
|
assert response.status_code in [200, 400, 404, 500]
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Error Handling Tests
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestErrorHandling:
|
||||||
|
"""Tests for error handling."""
|
||||||
|
|
||||||
|
def test_malformed_json(self, wine_csp_client):
|
||||||
|
"""Malformed JSON should return 400."""
|
||||||
|
response = wine_csp_client["session"].post(
|
||||||
|
f"{wine_csp_client['base_url']}/hash",
|
||||||
|
data="not valid json",
|
||||||
|
headers={"Content-Type": "application/json"},
|
||||||
|
timeout=REQUEST_TIMEOUT,
|
||||||
|
)
|
||||||
|
assert response.status_code == 400
|
||||||
|
|
||||||
|
def test_invalid_base64(self, wine_csp_client):
|
||||||
|
"""Invalid base64 should return 400."""
|
||||||
|
data = {"algorithm": "STREEBOG-256", "data": "not-valid-base64!!!"}
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 400
|
||||||
|
|
||||||
|
def test_unknown_endpoint(self, wine_csp_client):
|
||||||
|
"""Unknown endpoint should return 404."""
|
||||||
|
response = get(wine_csp_client, "/unknown-endpoint")
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Performance Tests
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestPerformance:
|
||||||
|
"""Performance benchmark tests."""
|
||||||
|
|
||||||
|
@pytest.mark.slow
|
||||||
|
def test_hash_throughput(self, wine_csp_client):
|
||||||
|
"""Hash operations should meet minimum throughput."""
|
||||||
|
data = {"algorithm": "STREEBOG-256", "data": encode_b64("X" * 1024)}
|
||||||
|
|
||||||
|
iterations = 50
|
||||||
|
start_time = time.time()
|
||||||
|
|
||||||
|
for _ in range(iterations):
|
||||||
|
response = post(wine_csp_client, "/hash", data)
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
elapsed = time.time() - start_time
|
||||||
|
ops_per_second = iterations / elapsed
|
||||||
|
|
||||||
|
print(f"Hash throughput: {ops_per_second:.2f} ops/sec")
|
||||||
|
print(f"Average latency: {(elapsed / iterations) * 1000:.2f} ms")
|
||||||
|
|
||||||
|
# Should achieve at least 5 ops/sec
|
||||||
|
assert ops_per_second >= 5, f"Throughput too low: {ops_per_second:.2f} ops/sec"
|
||||||
|
|
||||||
|
@pytest.mark.slow
|
||||||
|
def test_concurrent_requests(self, wine_csp_client):
|
||||||
|
"""Service should handle concurrent requests."""
|
||||||
|
import concurrent.futures
|
||||||
|
|
||||||
|
data = {"algorithm": "STREEBOG-256", "data": encode_b64("Concurrent test")}
|
||||||
|
|
||||||
|
def make_request():
|
||||||
|
return post(wine_csp_client, "/hash", data)
|
||||||
|
|
||||||
|
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
|
||||||
|
futures = [executor.submit(make_request) for _ in range(20)]
|
||||||
|
results = [f.result() for f in concurrent.futures.as_completed(futures)]
|
||||||
|
|
||||||
|
success_count = sum(1 for r in results if r.status_code == 200)
|
||||||
|
assert success_count >= 18, f"Too many failures: {20 - success_count}/20"
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
# Main
|
||||||
|
# ==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
pytest.main([__file__, "-v", "--tb=short"])
|
||||||
BIN
out/linksets/linksets-stage-backfill.tar.zst
Normal file
BIN
out/linksets/linksets-stage-backfill.tar.zst
Normal file
Binary file not shown.
1
out/linksets/linksets-stage-backfill.tar.zst.sha256
Normal file
1
out/linksets/linksets-stage-backfill.tar.zst.sha256
Normal file
@@ -0,0 +1 @@
|
|||||||
|
2b43ef9b5694f59be8c1d513893c506b8d1b8de152d820937178070bfc00d0c0 /mnt/e/dev/git.stella-ops.org/out/linksets/linksets-stage-backfill.tar.zst
|
||||||
87
scripts/concelier/backfill-store-aoc-19-005.sh
Normal file
87
scripts/concelier/backfill-store-aoc-19-005.sh
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Postgres backfill runner for STORE-AOC-19-005-DEV (Link-Not-Merge raw linksets/chunks)
|
||||||
|
# Usage:
|
||||||
|
# PGURI=postgres://.../concelier ./scripts/concelier/backfill-store-aoc-19-005.sh /path/to/linksets-stage-backfill.tar.zst
|
||||||
|
# Optional:
|
||||||
|
# PGSCHEMA=lnm_raw (default), DRY_RUN=1 to stop after extraction
|
||||||
|
#
|
||||||
|
# Assumptions:
|
||||||
|
# - Dataset contains ndjson files: linksets.ndjson, advisory_chunks.ndjson, manifest.json
|
||||||
|
# - Target staging tables are created by this script if absent:
|
||||||
|
# <schema>.linksets_raw(id text primary key, raw jsonb)
|
||||||
|
# <schema>.advisory_chunks_raw(id text primary key, raw jsonb)
|
||||||
|
|
||||||
|
DATASET_PATH="${1:-}"
|
||||||
|
if [[ -z "${DATASET_PATH}" || ! -f "${DATASET_PATH}" ]]; then
|
||||||
|
echo "Dataset tarball not found. Provide path to linksets-stage-backfill.tar.zst" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
PGURI="${PGURI:-${CONCELIER_PG_URI:-}}"
|
||||||
|
PGSCHEMA="${PGSCHEMA:-lnm_raw}"
|
||||||
|
DRY_RUN="${DRY_RUN:-0}"
|
||||||
|
|
||||||
|
if [[ -z "${PGURI}" ]]; then
|
||||||
|
echo "PGURI (or CONCELIER_PG_URI) must be set" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
WORKDIR="$(mktemp -d)"
|
||||||
|
cleanup() { rm -rf "${WORKDIR}"; }
|
||||||
|
trap cleanup EXIT
|
||||||
|
|
||||||
|
echo "==> Dataset: ${DATASET_PATH}"
|
||||||
|
sha256sum "${DATASET_PATH}"
|
||||||
|
|
||||||
|
echo "==> Extracting to ${WORKDIR}"
|
||||||
|
tar -xf "${DATASET_PATH}" -C "${WORKDIR}"
|
||||||
|
|
||||||
|
for required in linksets.ndjson advisory_chunks.ndjson manifest.json; do
|
||||||
|
if [[ ! -f "${WORKDIR}/${required}" ]]; then
|
||||||
|
echo "Missing required file in dataset: ${required}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "==> Ensuring staging schema/tables exist in Postgres"
|
||||||
|
psql "${PGURI}" <<SQL
|
||||||
|
create schema if not exists ${PGSCHEMA};
|
||||||
|
create table if not exists ${PGSCHEMA}.linksets_raw (
|
||||||
|
id text primary key,
|
||||||
|
raw jsonb not null
|
||||||
|
);
|
||||||
|
create table if not exists ${PGSCHEMA}.advisory_chunks_raw (
|
||||||
|
id text primary key,
|
||||||
|
raw jsonb not null
|
||||||
|
);
|
||||||
|
SQL
|
||||||
|
|
||||||
|
if [[ "${DRY_RUN}" != "0" ]]; then
|
||||||
|
echo "DRY_RUN=1 set; extraction and schema verification completed, skipping import."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "==> Importing linksets into ${PGSCHEMA}.linksets_raw"
|
||||||
|
cat >"${WORKDIR}/linksets.tsv" <(jq -rc '[._id, .] | @tsv' "${WORKDIR}/linksets.ndjson")
|
||||||
|
psql "${PGURI}" <<SQL
|
||||||
|
TRUNCATE TABLE ${PGSCHEMA}.linksets_raw;
|
||||||
|
\copy ${PGSCHEMA}.linksets_raw (id, raw) FROM '${WORKDIR}/linksets.tsv' WITH (FORMAT csv, DELIMITER E'\t', QUOTE '"', ESCAPE '"');
|
||||||
|
SQL
|
||||||
|
|
||||||
|
echo "==> Importing advisory_chunks into ${PGSCHEMA}.advisory_chunks_raw"
|
||||||
|
cat >"${WORKDIR}/advisory_chunks.tsv" <(jq -rc '[._id, .] | @tsv' "${WORKDIR}/advisory_chunks.ndjson")
|
||||||
|
psql "${PGURI}" <<SQL
|
||||||
|
TRUNCATE TABLE ${PGSCHEMA}.advisory_chunks_raw;
|
||||||
|
\copy ${PGSCHEMA}.advisory_chunks_raw (id, raw) FROM '${WORKDIR}/advisory_chunks.tsv' WITH (FORMAT csv, DELIMITER E'\t', QUOTE '"', ESCAPE '"');
|
||||||
|
SQL
|
||||||
|
|
||||||
|
echo "==> Post-import counts"
|
||||||
|
psql -tA "${PGURI}" -c "select 'linksets_raw='||count(*) from ${PGSCHEMA}.linksets_raw;"
|
||||||
|
psql -tA "${PGURI}" -c "select 'advisory_chunks_raw='||count(*) from ${PGSCHEMA}.advisory_chunks_raw;"
|
||||||
|
|
||||||
|
echo "==> Manifest summary"
|
||||||
|
cat "${WORKDIR}/manifest.json"
|
||||||
|
|
||||||
|
echo "Backfill complete."
|
||||||
74
scripts/concelier/build-store-aoc-19-005-dataset.sh
Normal file
74
scripts/concelier/build-store-aoc-19-005-dataset.sh
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Deterministic dataset builder for STORE-AOC-19-005-DEV.
|
||||||
|
# Generates linksets-stage-backfill.tar.zst from repo seed data.
|
||||||
|
# Usage:
|
||||||
|
# ./scripts/concelier/build-store-aoc-19-005-dataset.sh [output_tarball]
|
||||||
|
# Default output: out/linksets/linksets-stage-backfill.tar.zst
|
||||||
|
|
||||||
|
command -v tar >/dev/null || { echo "tar is required" >&2; exit 1; }
|
||||||
|
command -v sha256sum >/dev/null || { echo "sha256sum is required" >&2; exit 1; }
|
||||||
|
|
||||||
|
TAR_COMPRESS=()
|
||||||
|
if command -v zstd >/dev/null 2>&1; then
|
||||||
|
TAR_COMPRESS=(--zstd)
|
||||||
|
else
|
||||||
|
echo "zstd not found; building uncompressed tarball (extension kept for compatibility)" >&2
|
||||||
|
fi
|
||||||
|
|
||||||
|
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||||
|
SEED_DIR="${ROOT_DIR}/seed-data/concelier/store-aoc-19-005"
|
||||||
|
OUT_DIR="${ROOT_DIR}/out/linksets"
|
||||||
|
OUT_PATH="${1:-${OUT_DIR}/linksets-stage-backfill.tar.zst}"
|
||||||
|
GEN_TIME="2025-12-07T00:00:00Z"
|
||||||
|
|
||||||
|
for seed in linksets.ndjson advisory_chunks.ndjson; do
|
||||||
|
if [[ ! -f "${SEED_DIR}/${seed}" ]]; then
|
||||||
|
echo "Missing seed file: ${SEED_DIR}/${seed}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
WORKDIR="$(mktemp -d)"
|
||||||
|
cleanup() { rm -rf "${WORKDIR}"; }
|
||||||
|
trap cleanup EXIT
|
||||||
|
|
||||||
|
cp "${SEED_DIR}/linksets.ndjson" "${WORKDIR}/linksets.ndjson"
|
||||||
|
cp "${SEED_DIR}/advisory_chunks.ndjson" "${WORKDIR}/advisory_chunks.ndjson"
|
||||||
|
|
||||||
|
linksets_sha=$(sha256sum "${WORKDIR}/linksets.ndjson" | awk '{print $1}')
|
||||||
|
advisory_sha=$(sha256sum "${WORKDIR}/advisory_chunks.ndjson" | awk '{print $1}')
|
||||||
|
linksets_count=$(wc -l < "${WORKDIR}/linksets.ndjson" | tr -d '[:space:]')
|
||||||
|
advisory_count=$(wc -l < "${WORKDIR}/advisory_chunks.ndjson" | tr -d '[:space:]')
|
||||||
|
|
||||||
|
cat >"${WORKDIR}/manifest.json" <<EOF
|
||||||
|
{
|
||||||
|
"datasetId": "store-aoc-19-005-dev",
|
||||||
|
"generatedAt": "${GEN_TIME}",
|
||||||
|
"source": "seed-data/concelier/store-aoc-19-005",
|
||||||
|
"records": {
|
||||||
|
"linksets": ${linksets_count},
|
||||||
|
"advisory_chunks": ${advisory_count}
|
||||||
|
},
|
||||||
|
"sha256": {
|
||||||
|
"linksets.ndjson": "${linksets_sha}",
|
||||||
|
"advisory_chunks.ndjson": "${advisory_sha}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
mkdir -p "${OUT_DIR}"
|
||||||
|
|
||||||
|
tar "${TAR_COMPRESS[@]}" \
|
||||||
|
--format=ustar \
|
||||||
|
--mtime='1970-01-01 00:00:00Z' \
|
||||||
|
--owner=0 --group=0 --numeric-owner \
|
||||||
|
-cf "${OUT_PATH}" \
|
||||||
|
-C "${WORKDIR}" \
|
||||||
|
linksets.ndjson advisory_chunks.ndjson manifest.json
|
||||||
|
|
||||||
|
sha256sum "${OUT_PATH}" > "${OUT_PATH}.sha256"
|
||||||
|
|
||||||
|
echo "Wrote ${OUT_PATH}"
|
||||||
|
cat "${OUT_PATH}.sha256"
|
||||||
55
scripts/concelier/export-linksets-tarball.sh
Normal file
55
scripts/concelier/export-linksets-tarball.sh
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Export Concelier linksets/advisory_chunks from Postgres to a tar.zst bundle.
|
||||||
|
# Usage:
|
||||||
|
# PGURI=postgres://user:pass@host:5432/db \
|
||||||
|
# ./scripts/concelier/export-linksets-tarball.sh out/linksets/linksets-stage-backfill.tar.zst
|
||||||
|
#
|
||||||
|
# Optional env:
|
||||||
|
# PGSCHEMA=public # schema that owns linksets/advisory_chunks
|
||||||
|
# LINKSETS_TABLE=linksets # table name for linksets
|
||||||
|
# CHUNKS_TABLE=advisory_chunks # table name for advisory chunks
|
||||||
|
# TMPDIR=/tmp/export-linksets # working directory (defaults to mktemp)
|
||||||
|
|
||||||
|
TARGET="${1:-}"
|
||||||
|
if [[ -z "${TARGET}" ]]; then
|
||||||
|
echo "Usage: PGURI=... $0 out/linksets/linksets-stage-backfill.tar.zst" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -z "${PGURI:-}" ]]; then
|
||||||
|
echo "PGURI environment variable is required (postgres://...)" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
PGSCHEMA="${PGSCHEMA:-public}"
|
||||||
|
LINKSETS_TABLE="${LINKSETS_TABLE:-linksets}"
|
||||||
|
CHUNKS_TABLE="${CHUNKS_TABLE:-advisory_chunks}"
|
||||||
|
WORKDIR="${TMPDIR:-$(mktemp -d)}"
|
||||||
|
|
||||||
|
mkdir -p "${WORKDIR}"
|
||||||
|
OUTDIR="$(dirname "${TARGET}")"
|
||||||
|
mkdir -p "${OUTDIR}"
|
||||||
|
|
||||||
|
echo "==> Exporting linksets from ${PGSCHEMA}.${LINKSETS_TABLE}"
|
||||||
|
psql "${PGURI}" -c "\copy (select row_to_json(t) from ${PGSCHEMA}.${LINKSETS_TABLE} t) to '${WORKDIR}/linksets.ndjson'"
|
||||||
|
|
||||||
|
echo "==> Exporting advisory_chunks from ${PGSCHEMA}.${CHUNKS_TABLE}"
|
||||||
|
psql "${PGURI}" -c "\copy (select row_to_json(t) from ${PGSCHEMA}.${CHUNKS_TABLE} t) to '${WORKDIR}/advisory_chunks.ndjson'"
|
||||||
|
|
||||||
|
LINKSETS_COUNT="$(wc -l < "${WORKDIR}/linksets.ndjson")"
|
||||||
|
CHUNKS_COUNT="$(wc -l < "${WORKDIR}/advisory_chunks.ndjson")"
|
||||||
|
|
||||||
|
echo "==> Writing manifest.json"
|
||||||
|
jq -n --argjson linksets "${LINKSETS_COUNT}" --argjson advisory_chunks "${CHUNKS_COUNT}" \
|
||||||
|
'{linksets: $linksets, advisory_chunks: $advisory_chunks}' \
|
||||||
|
> "${WORKDIR}/manifest.json"
|
||||||
|
|
||||||
|
echo "==> Building tarball ${TARGET}"
|
||||||
|
tar -I "zstd -19" -cf "${TARGET}" -C "${WORKDIR}" linksets.ndjson advisory_chunks.ndjson manifest.json
|
||||||
|
|
||||||
|
echo "==> SHA-256"
|
||||||
|
sha256sum "${TARGET}"
|
||||||
|
|
||||||
|
echo "Done. Workdir: ${WORKDIR}"
|
||||||
90
scripts/concelier/test-store-aoc-19-005-dataset.sh
Normal file
90
scripts/concelier/test-store-aoc-19-005-dataset.sh
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Validates the store-aoc-19-005 dataset tarball.
|
||||||
|
# Usage: ./scripts/concelier/test-store-aoc-19-005-dataset.sh [tarball]
|
||||||
|
|
||||||
|
command -v tar >/dev/null || { echo "tar is required" >&2; exit 1; }
|
||||||
|
command -v sha256sum >/dev/null || { echo "sha256sum is required" >&2; exit 1; }
|
||||||
|
command -v python >/dev/null || { echo "python is required" >&2; exit 1; }
|
||||||
|
|
||||||
|
DATASET="${1:-out/linksets/linksets-stage-backfill.tar.zst}"
|
||||||
|
|
||||||
|
if [[ ! -f "${DATASET}" ]]; then
|
||||||
|
echo "Dataset not found: ${DATASET}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
WORKDIR="$(mktemp -d)"
|
||||||
|
cleanup() { rm -rf "${WORKDIR}"; }
|
||||||
|
trap cleanup EXIT
|
||||||
|
|
||||||
|
tar -xf "${DATASET}" -C "${WORKDIR}"
|
||||||
|
|
||||||
|
for required in linksets.ndjson advisory_chunks.ndjson manifest.json; do
|
||||||
|
if [[ ! -f "${WORKDIR}/${required}" ]]; then
|
||||||
|
echo "Missing ${required} in dataset" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
manifest="${WORKDIR}/manifest.json"
|
||||||
|
expected_linksets=$(python - <<'PY' "${manifest}"
|
||||||
|
import json, sys
|
||||||
|
with open(sys.argv[1], "r", encoding="utf-8") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
print(data["records"]["linksets"])
|
||||||
|
PY
|
||||||
|
)
|
||||||
|
expected_chunks=$(python - <<'PY' "${manifest}"
|
||||||
|
import json, sys
|
||||||
|
with open(sys.argv[1], "r", encoding="utf-8") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
print(data["records"]["advisory_chunks"])
|
||||||
|
PY
|
||||||
|
)
|
||||||
|
expected_linksets_sha=$(python - <<'PY' "${manifest}"
|
||||||
|
import json, sys
|
||||||
|
with open(sys.argv[1], "r", encoding="utf-8") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
print(data["sha256"]["linksets.ndjson"])
|
||||||
|
PY
|
||||||
|
)
|
||||||
|
expected_chunks_sha=$(python - <<'PY' "${manifest}"
|
||||||
|
import json, sys
|
||||||
|
with open(sys.argv[1], "r", encoding="utf-8") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
print(data["sha256"]["advisory_chunks.ndjson"])
|
||||||
|
PY
|
||||||
|
)
|
||||||
|
|
||||||
|
actual_linksets=$(wc -l < "${WORKDIR}/linksets.ndjson" | tr -d '[:space:]')
|
||||||
|
actual_chunks=$(wc -l < "${WORKDIR}/advisory_chunks.ndjson" | tr -d '[:space:]')
|
||||||
|
actual_linksets_sha=$(sha256sum "${WORKDIR}/linksets.ndjson" | awk '{print $1}')
|
||||||
|
actual_chunks_sha=$(sha256sum "${WORKDIR}/advisory_chunks.ndjson" | awk '{print $1}')
|
||||||
|
|
||||||
|
if [[ "${expected_linksets}" != "${actual_linksets}" ]]; then
|
||||||
|
echo "linksets count mismatch: expected ${expected_linksets}, got ${actual_linksets}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "${expected_chunks}" != "${actual_chunks}" ]]; then
|
||||||
|
echo "advisory_chunks count mismatch: expected ${expected_chunks}, got ${actual_chunks}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "${expected_linksets_sha}" != "${actual_linksets_sha}" ]]; then
|
||||||
|
echo "linksets sha mismatch: expected ${expected_linksets_sha}, got ${actual_linksets_sha}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "${expected_chunks_sha}" != "${actual_chunks_sha}" ]]; then
|
||||||
|
echo "advisory_chunks sha mismatch: expected ${expected_chunks_sha}, got ${actual_chunks_sha}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Dataset validation succeeded:"
|
||||||
|
echo " linksets: ${actual_linksets}"
|
||||||
|
echo " advisory_chunks: ${actual_chunks}"
|
||||||
|
echo " linksets.sha256=${actual_linksets_sha}"
|
||||||
|
echo " advisory_chunks.sha256=${actual_chunks_sha}"
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
{"_id":"obs-nvd-0001","tenant":"demo","advisoryId":"ADV-2025-0001","provider":"nvd","source":"https://nvd.nist.gov/vuln/detail/CVE-2025-1000","ingestedAt":"2025-11-12T00:00:00Z","chunk":{"vulnerabilityId":"CVE-2025-1000","status":"affected","description":"Example advisory text","severity":"high","references":["https://example.org/advisory/CVE-2025-1000"]}}
|
||||||
|
{"_id":"obs-ghsa-0001","tenant":"demo","advisoryId":"ADV-2025-0002","provider":"ghsa","source":"https://github.com/advisories/GHSA-aaaa-bbbb-cccc","ingestedAt":"2025-11-12T00:00:00Z","chunk":{"vulnerabilityId":"CVE-2025-1000","status":"not_affected","justification":"component_not_present","notes":"GHSA reports false positive for this package version","references":["https://github.com/org/repo/security/advisories/GHSA-aaaa-bbbb-cccc"]}}
|
||||||
|
{"_id":"obs-osv-0001","tenant":"demo","advisoryId":"ADV-2025-0003","provider":"osv","source":"https://osv.dev/vulnerability/OSV-2025-0003","ingestedAt":"2025-11-12T00:00:00Z","chunk":{"vulnerabilityId":"CVE-2025-2000","status":"under_investigation","references":["https://osv.dev/vulnerability/OSV-2025-0003"]}}
|
||||||
2
seed-data/concelier/store-aoc-19-005/linksets.ndjson
Normal file
2
seed-data/concelier/store-aoc-19-005/linksets.ndjson
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
{"_id":"lnm-linkset-0001","tenant":"demo","linksetId":"CVE-2025-1000:pkg:maven/org.example/app@1.2.3","vulnerabilityId":"CVE-2025-1000","purl":"pkg:maven/org.example/app@1.2.3","statuses":["affected","not_affected"],"providers":["nvd","ghsa"],"conflicts":[{"providerId":"nvd","status":"affected"},{"providerId":"ghsa","status":"not_affected","justification":"component_not_present"}],"observations":["obs-nvd-0001","obs-ghsa-0001"],"createdAt":"2025-11-12T00:00:00Z"}
|
||||||
|
{"_id":"lnm-linkset-0002","tenant":"demo","linksetId":"CVE-2025-2000:pkg:npm/example/app@4.5.6","vulnerabilityId":"CVE-2025-2000","purl":"pkg:npm/example/app@4.5.6","statuses":["under_investigation"],"providers":["osv"],"conflicts":[],"observations":["obs-osv-0001"],"createdAt":"2025-11-12T00:00:00Z"}
|
||||||
@@ -69,7 +69,7 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
|
|||||||
var bundleId = GenerateBundleId(manifest);
|
var bundleId = GenerateBundleId(manifest);
|
||||||
var manifestDigest = ComputeDigest(File.ReadAllBytes(manifestResult.ManifestPath));
|
var manifestDigest = ComputeDigest(File.ReadAllBytes(manifestResult.ManifestPath));
|
||||||
|
|
||||||
var catalogEntry = new BundleCatalogEntry(
|
var catalogEntry = new ImportModels.BundleCatalogEntry(
|
||||||
request.TenantId ?? "default",
|
request.TenantId ?? "default",
|
||||||
bundleId,
|
bundleId,
|
||||||
manifestDigest,
|
manifestDigest,
|
||||||
@@ -79,12 +79,12 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
|
|||||||
await _catalogRepository.UpsertAsync(catalogEntry, cancellationToken).ConfigureAwait(false);
|
await _catalogRepository.UpsertAsync(catalogEntry, cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
// Register individual items
|
// Register individual items
|
||||||
var items = manifest.Exports?.Select(e => new BundleItem(
|
var items = manifest.Exports?.Select(e => new ImportModels.BundleItem(
|
||||||
request.TenantId ?? "default",
|
request.TenantId ?? "default",
|
||||||
bundleId,
|
bundleId,
|
||||||
e.Key,
|
e.Key,
|
||||||
e.ArtifactDigest,
|
e.ArtifactDigest,
|
||||||
e.ArtifactSizeBytes ?? 0)) ?? Enumerable.Empty<BundleItem>();
|
e.ArtifactSizeBytes ?? 0)) ?? Enumerable.Empty<ImportModels.BundleItem>();
|
||||||
|
|
||||||
await _itemRepository.UpsertManyAsync(items, cancellationToken).ConfigureAwait(false);
|
await _itemRepository.UpsertManyAsync(items, cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
@@ -238,10 +238,10 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
|
|||||||
try
|
try
|
||||||
{
|
{
|
||||||
var envelopeJson = await File.ReadAllTextAsync(dsseFile, cancellationToken).ConfigureAwait(false);
|
var envelopeJson = await File.ReadAllTextAsync(dsseFile, cancellationToken).ConfigureAwait(false);
|
||||||
var envelope = ImportModels.DsseEnvelope.Parse(envelopeJson);
|
var envelope = StellaOps.AirGap.Importer.Validation.DsseEnvelope.Parse(envelopeJson);
|
||||||
|
|
||||||
// Load trust roots if provided
|
// Load trust roots if provided
|
||||||
ImportModels.TrustRootConfig trustRoots;
|
TrustRootConfig trustRoots;
|
||||||
if (!string.IsNullOrWhiteSpace(trustRootsPath) && File.Exists(trustRootsPath))
|
if (!string.IsNullOrWhiteSpace(trustRootsPath) && File.Exists(trustRootsPath))
|
||||||
{
|
{
|
||||||
trustRoots = await LoadTrustRootsAsync(trustRootsPath, cancellationToken).ConfigureAwait(false);
|
trustRoots = await LoadTrustRootsAsync(trustRootsPath, cancellationToken).ConfigureAwait(false);
|
||||||
@@ -287,7 +287,7 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static async Task<ImportModels.TrustRootConfig> LoadTrustRootsAsync(string path, CancellationToken cancellationToken)
|
private static async Task<TrustRootConfig> LoadTrustRootsAsync(string path, CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
var json = await File.ReadAllTextAsync(path, cancellationToken).ConfigureAwait(false);
|
var json = await File.ReadAllTextAsync(path, cancellationToken).ConfigureAwait(false);
|
||||||
var doc = JsonDocument.Parse(json);
|
var doc = JsonDocument.Parse(json);
|
||||||
@@ -324,7 +324,7 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return new ImportModels.TrustRootConfig(path, fingerprints, algorithms, null, null, publicKeys);
|
return new TrustRootConfig(path, fingerprints, algorithms, null, null, publicKeys);
|
||||||
}
|
}
|
||||||
|
|
||||||
private async Task<List<string>> CopyArtifactsAsync(string bundleDir, string dataStorePath, MirrorBundle manifest, CancellationToken cancellationToken)
|
private async Task<List<string>> CopyArtifactsAsync(string bundleDir, string dataStorePath, MirrorBundle manifest, CancellationToken cancellationToken)
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using MongoDB.Driver;
|
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Postgres.Advisories;
|
using StellaOps.Concelier.Storage.Postgres.Advisories;
|
||||||
@@ -7,66 +6,49 @@ using StellaOps.Concelier.Storage.Postgres.Advisories;
|
|||||||
namespace StellaOps.Concelier.WebService.DualWrite;
|
namespace StellaOps.Concelier.WebService.DualWrite;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// Dual-write advisory store that writes to both MongoDB and PostgreSQL simultaneously.
|
/// Postgres-backed advisory store that implements the legacy Mongo contracts.
|
||||||
/// Used during migration to verify parity between backends.
|
|
||||||
/// </summary>
|
/// </summary>
|
||||||
/// <remarks>
|
|
||||||
/// MongoDB is the primary store; PostgreSQL writes are best-effort with error logging.
|
|
||||||
/// Read operations are always served from MongoDB.
|
|
||||||
/// </remarks>
|
|
||||||
public sealed class DualWriteAdvisoryStore : IAdvisoryStore
|
public sealed class DualWriteAdvisoryStore : IAdvisoryStore
|
||||||
{
|
{
|
||||||
private readonly AdvisoryStore _mongoStore;
|
|
||||||
private readonly IPostgresAdvisoryStore _postgresStore;
|
private readonly IPostgresAdvisoryStore _postgresStore;
|
||||||
private readonly ILogger<DualWriteAdvisoryStore> _logger;
|
private readonly ILogger<DualWriteAdvisoryStore> _logger;
|
||||||
|
|
||||||
public DualWriteAdvisoryStore(
|
public DualWriteAdvisoryStore(IPostgresAdvisoryStore postgresStore, ILogger<DualWriteAdvisoryStore> logger)
|
||||||
AdvisoryStore mongoStore,
|
|
||||||
IPostgresAdvisoryStore postgresStore,
|
|
||||||
ILogger<DualWriteAdvisoryStore> logger)
|
|
||||||
{
|
{
|
||||||
_mongoStore = mongoStore ?? throw new ArgumentNullException(nameof(mongoStore));
|
|
||||||
_postgresStore = postgresStore ?? throw new ArgumentNullException(nameof(postgresStore));
|
_postgresStore = postgresStore ?? throw new ArgumentNullException(nameof(postgresStore));
|
||||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <inheritdoc />
|
/// <inheritdoc />
|
||||||
public async Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
public async Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
// Write to MongoDB (primary)
|
|
||||||
await _mongoStore.UpsertAsync(advisory, cancellationToken, session).ConfigureAwait(false);
|
|
||||||
|
|
||||||
// Write to PostgreSQL (secondary, best-effort)
|
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
await _postgresStore.UpsertAsync(advisory, sourceId: null, cancellationToken).ConfigureAwait(false);
|
await _postgresStore.UpsertAsync(advisory, sourceId: null, cancellationToken).ConfigureAwait(false);
|
||||||
_logger.LogDebug("Dual-write success for advisory {AdvisoryKey}", advisory.AdvisoryKey);
|
_logger.LogDebug("Stored advisory {AdvisoryKey} in PostgreSQL", advisory.AdvisoryKey);
|
||||||
}
|
}
|
||||||
catch (Exception ex)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
// Log but don't fail - MongoDB is primary during migration
|
_logger.LogWarning(ex, "PostgreSQL advisory write failed for {AdvisoryKey}", advisory.AdvisoryKey);
|
||||||
_logger.LogWarning(ex, "Dual-write to PostgreSQL failed for advisory {AdvisoryKey}. MongoDB write succeeded.", advisory.AdvisoryKey);
|
throw;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <inheritdoc />
|
/// <inheritdoc />
|
||||||
public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
// Always read from MongoDB during dual-write mode
|
return _postgresStore.FindAsync(advisoryKey, cancellationToken);
|
||||||
return _mongoStore.FindAsync(advisoryKey, cancellationToken, session);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <inheritdoc />
|
/// <inheritdoc />
|
||||||
public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
// Always read from MongoDB during dual-write mode
|
return _postgresStore.GetRecentAsync(limit, cancellationToken);
|
||||||
return _mongoStore.GetRecentAsync(limit, cancellationToken, session);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <inheritdoc />
|
/// <inheritdoc />
|
||||||
public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
// Always read from MongoDB during dual-write mode
|
return _postgresStore.StreamAsync(cancellationToken);
|
||||||
return _mongoStore.StreamAsync(cancellationToken, session);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ using StellaOps.Concelier.Core.AirGap.Models;
|
|||||||
using StellaOps.Concelier.WebService.Diagnostics;
|
using StellaOps.Concelier.WebService.Diagnostics;
|
||||||
using StellaOps.Concelier.WebService.Options;
|
using StellaOps.Concelier.WebService.Options;
|
||||||
using StellaOps.Concelier.WebService.Results;
|
using StellaOps.Concelier.WebService.Results;
|
||||||
|
using HttpResults = Microsoft.AspNetCore.Http.Results;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.WebService.Extensions;
|
namespace StellaOps.Concelier.WebService.Extensions;
|
||||||
|
|
||||||
@@ -39,7 +40,7 @@ internal static class AirGapEndpointExtensions
|
|||||||
var catalog = await catalogService.GetCatalogAsync(cursor, limit, cancellationToken)
|
var catalog = await catalogService.GetCatalogAsync(cursor, limit, cancellationToken)
|
||||||
.ConfigureAwait(false);
|
.ConfigureAwait(false);
|
||||||
|
|
||||||
return Results.Ok(catalog);
|
return HttpResults.Ok(catalog);
|
||||||
});
|
});
|
||||||
|
|
||||||
// GET /api/v1/concelier/airgap/sources - List registered sources
|
// GET /api/v1/concelier/airgap/sources - List registered sources
|
||||||
@@ -55,7 +56,7 @@ internal static class AirGapEndpointExtensions
|
|||||||
}
|
}
|
||||||
|
|
||||||
var sources = sourceRegistry.GetSources();
|
var sources = sourceRegistry.GetSources();
|
||||||
return Results.Ok(new { sources, count = sources.Count });
|
return HttpResults.Ok(new { sources, count = sources.Count });
|
||||||
});
|
});
|
||||||
|
|
||||||
// POST /api/v1/concelier/airgap/sources - Register new source
|
// POST /api/v1/concelier/airgap/sources - Register new source
|
||||||
@@ -80,7 +81,7 @@ internal static class AirGapEndpointExtensions
|
|||||||
var source = await sourceRegistry.RegisterAsync(registration, cancellationToken)
|
var source = await sourceRegistry.RegisterAsync(registration, cancellationToken)
|
||||||
.ConfigureAwait(false);
|
.ConfigureAwait(false);
|
||||||
|
|
||||||
return Results.Created($"/api/v1/concelier/airgap/sources/{source.Id}", source);
|
return HttpResults.Created($"/api/v1/concelier/airgap/sources/{source.Id}", source);
|
||||||
});
|
});
|
||||||
|
|
||||||
// GET /api/v1/concelier/airgap/sources/{sourceId} - Get specific source
|
// GET /api/v1/concelier/airgap/sources/{sourceId} - Get specific source
|
||||||
@@ -102,7 +103,7 @@ internal static class AirGapEndpointExtensions
|
|||||||
return ConcelierProblemResultFactory.BundleSourceNotFound(context, sourceId);
|
return ConcelierProblemResultFactory.BundleSourceNotFound(context, sourceId);
|
||||||
}
|
}
|
||||||
|
|
||||||
return Results.Ok(source);
|
return HttpResults.Ok(source);
|
||||||
});
|
});
|
||||||
|
|
||||||
// DELETE /api/v1/concelier/airgap/sources/{sourceId} - Unregister source
|
// DELETE /api/v1/concelier/airgap/sources/{sourceId} - Unregister source
|
||||||
@@ -123,7 +124,7 @@ internal static class AirGapEndpointExtensions
|
|||||||
.ConfigureAwait(false);
|
.ConfigureAwait(false);
|
||||||
|
|
||||||
return removed
|
return removed
|
||||||
? Results.NoContent()
|
? HttpResults.NoContent()
|
||||||
: ConcelierProblemResultFactory.BundleSourceNotFound(context, sourceId);
|
: ConcelierProblemResultFactory.BundleSourceNotFound(context, sourceId);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -144,7 +145,7 @@ internal static class AirGapEndpointExtensions
|
|||||||
var result = await sourceRegistry.ValidateAsync(sourceId, cancellationToken)
|
var result = await sourceRegistry.ValidateAsync(sourceId, cancellationToken)
|
||||||
.ConfigureAwait(false);
|
.ConfigureAwait(false);
|
||||||
|
|
||||||
return Results.Ok(result);
|
return HttpResults.Ok(result);
|
||||||
});
|
});
|
||||||
|
|
||||||
// GET /api/v1/concelier/airgap/status - Sealed-mode status
|
// GET /api/v1/concelier/airgap/status - Sealed-mode status
|
||||||
@@ -160,7 +161,7 @@ internal static class AirGapEndpointExtensions
|
|||||||
}
|
}
|
||||||
|
|
||||||
var status = sealedModeEnforcer.GetStatus();
|
var status = sealedModeEnforcer.GetStatus();
|
||||||
return Results.Ok(status);
|
return HttpResults.Ok(status);
|
||||||
});
|
});
|
||||||
|
|
||||||
// POST /api/v1/concelier/airgap/bundles/{bundleId}/import - Import a bundle with timeline event
|
// POST /api/v1/concelier/airgap/bundles/{bundleId}/import - Import a bundle with timeline event
|
||||||
@@ -241,7 +242,7 @@ internal static class AirGapEndpointExtensions
|
|||||||
var timelineEvent = await timelineEmitter.EmitImportAsync(importRequest, importResult, cancellationToken)
|
var timelineEvent = await timelineEmitter.EmitImportAsync(importRequest, importResult, cancellationToken)
|
||||||
.ConfigureAwait(false);
|
.ConfigureAwait(false);
|
||||||
|
|
||||||
return Results.Ok(new BundleImportResponseDto
|
return HttpResults.Ok(new BundleImportResponseDto
|
||||||
{
|
{
|
||||||
EventId = timelineEvent.EventId,
|
EventId = timelineEvent.EventId,
|
||||||
BundleId = bundleId,
|
BundleId = bundleId,
|
||||||
|
|||||||
@@ -4,8 +4,9 @@ using Microsoft.AspNetCore.Http;
|
|||||||
using Microsoft.Extensions.Options;
|
using Microsoft.Extensions.Options;
|
||||||
using StellaOps.Concelier.WebService.Diagnostics;
|
using StellaOps.Concelier.WebService.Diagnostics;
|
||||||
using StellaOps.Concelier.WebService.Options;
|
using StellaOps.Concelier.WebService.Options;
|
||||||
using StellaOps.Concelier.WebService.Results;
|
|
||||||
using StellaOps.Concelier.WebService.Services;
|
using StellaOps.Concelier.WebService.Services;
|
||||||
|
using StellaOps.Concelier.WebService.Results;
|
||||||
|
using HttpResults = Microsoft.AspNetCore.Http.Results;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.WebService.Extensions;
|
namespace StellaOps.Concelier.WebService.Extensions;
|
||||||
|
|
||||||
@@ -116,7 +117,7 @@ internal static class MirrorEndpointExtensions
|
|||||||
|
|
||||||
private static bool TryAuthorize(bool requireAuthentication, bool enforceAuthority, HttpContext context, bool authorityConfigured, out IResult result)
|
private static bool TryAuthorize(bool requireAuthentication, bool enforceAuthority, HttpContext context, bool authorityConfigured, out IResult result)
|
||||||
{
|
{
|
||||||
result = Results.Empty;
|
result = HttpResults.Empty;
|
||||||
if (!requireAuthentication)
|
if (!requireAuthentication)
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
@@ -133,7 +134,7 @@ internal static class MirrorEndpointExtensions
|
|||||||
}
|
}
|
||||||
|
|
||||||
context.Response.Headers.WWWAuthenticate = "Bearer realm=\"StellaOps Concelier Mirror\"";
|
context.Response.Headers.WWWAuthenticate = "Bearer realm=\"StellaOps Concelier Mirror\"";
|
||||||
result = Results.StatusCode(StatusCodes.Status401Unauthorized);
|
result = HttpResults.StatusCode(StatusCodes.Status401Unauthorized);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -154,7 +155,7 @@ internal static class MirrorEndpointExtensions
|
|||||||
context.Response.Headers.CacheControl = BuildCacheControlHeader(path);
|
context.Response.Headers.CacheControl = BuildCacheControlHeader(path);
|
||||||
context.Response.Headers.LastModified = fileInfo.LastWriteTimeUtc.ToString("R", CultureInfo.InvariantCulture);
|
context.Response.Headers.LastModified = fileInfo.LastWriteTimeUtc.ToString("R", CultureInfo.InvariantCulture);
|
||||||
context.Response.ContentLength = fileInfo.Length;
|
context.Response.ContentLength = fileInfo.Length;
|
||||||
return Task.FromResult(Results.Stream(stream, contentType));
|
return Task.FromResult(HttpResults.Stream(stream, contentType));
|
||||||
}
|
}
|
||||||
|
|
||||||
private static string ResolveContentType(string path)
|
private static string ResolveContentType(string path)
|
||||||
|
|||||||
@@ -22,8 +22,6 @@ using System.Diagnostics;
|
|||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using Microsoft.Extensions.Options;
|
using Microsoft.Extensions.Options;
|
||||||
using Microsoft.Extensions.Primitives;
|
using Microsoft.Extensions.Primitives;
|
||||||
using MongoDB.Bson;
|
|
||||||
using MongoDB.Driver;
|
|
||||||
using StellaOps.Concelier.Core.Events;
|
using StellaOps.Concelier.Core.Events;
|
||||||
using StellaOps.Concelier.Core.Jobs;
|
using StellaOps.Concelier.Core.Jobs;
|
||||||
using StellaOps.Concelier.Core.Observations;
|
using StellaOps.Concelier.Core.Observations;
|
||||||
@@ -40,6 +38,7 @@ using StellaOps.Concelier.WebService.Options;
|
|||||||
using StellaOps.Concelier.WebService.Filters;
|
using StellaOps.Concelier.WebService.Filters;
|
||||||
using StellaOps.Concelier.WebService.Services;
|
using StellaOps.Concelier.WebService.Services;
|
||||||
using StellaOps.Concelier.WebService.Telemetry;
|
using StellaOps.Concelier.WebService.Telemetry;
|
||||||
|
using StellaOps.Concelier.WebService.Results;
|
||||||
using Serilog.Events;
|
using Serilog.Events;
|
||||||
using StellaOps.Plugin.DependencyInjection;
|
using StellaOps.Plugin.DependencyInjection;
|
||||||
using StellaOps.Plugin.Hosting;
|
using StellaOps.Plugin.Hosting;
|
||||||
@@ -50,23 +49,23 @@ using StellaOps.Auth.ServerIntegration;
|
|||||||
using StellaOps.Aoc;
|
using StellaOps.Aoc;
|
||||||
using StellaOps.Concelier.WebService.Deprecation;
|
using StellaOps.Concelier.WebService.Deprecation;
|
||||||
using StellaOps.Aoc.AspNetCore.Routing;
|
using StellaOps.Aoc.AspNetCore.Routing;
|
||||||
using StellaOps.Aoc.AspNetCore.Results;
|
|
||||||
using StellaOps.Concelier.WebService.Contracts;
|
using StellaOps.Concelier.WebService.Contracts;
|
||||||
using StellaOps.Concelier.WebService.Results;
|
|
||||||
using StellaOps.Concelier.Core.Aoc;
|
using StellaOps.Concelier.Core.Aoc;
|
||||||
using StellaOps.Concelier.Core.Raw;
|
using StellaOps.Concelier.Core.Raw;
|
||||||
using StellaOps.Concelier.RawModels;
|
using StellaOps.Concelier.RawModels;
|
||||||
|
using StellaOps.Concelier.Storage.Postgres;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Aliases;
|
using StellaOps.Concelier.Storage.Mongo.Aliases;
|
||||||
using StellaOps.Concelier.Storage.Postgres;
|
|
||||||
using StellaOps.Provenance.Mongo;
|
|
||||||
using StellaOps.Concelier.Core.Attestation;
|
using StellaOps.Concelier.Core.Attestation;
|
||||||
using StellaOps.Concelier.Core.Signals;
|
using StellaOps.Concelier.Core.Signals;
|
||||||
using AttestationClaims = StellaOps.Concelier.Core.Attestation.AttestationClaims;
|
using AttestationClaims = StellaOps.Concelier.Core.Attestation.AttestationClaims;
|
||||||
using StellaOps.Concelier.Core.Orchestration;
|
using StellaOps.Concelier.Core.Orchestration;
|
||||||
using System.Diagnostics.Metrics;
|
using System.Diagnostics.Metrics;
|
||||||
using StellaOps.Concelier.Models.Observations;
|
using StellaOps.Concelier.Models.Observations;
|
||||||
|
using StellaOps.Aoc.AspNetCore.Results;
|
||||||
|
using StellaOps.Provenance.Mongo;
|
||||||
|
using HttpResults = Microsoft.AspNetCore.Http.Results;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.WebService
|
namespace StellaOps.Concelier.WebService
|
||||||
{
|
{
|
||||||
@@ -179,26 +178,6 @@ builder.Services.AddSingleton<MirrorRateLimiter>();
|
|||||||
builder.Services.AddSingleton<MirrorFileLocator>();
|
builder.Services.AddSingleton<MirrorFileLocator>();
|
||||||
|
|
||||||
var isTesting = builder.Environment.IsEnvironment("Testing");
|
var isTesting = builder.Environment.IsEnvironment("Testing");
|
||||||
var mongoBypass = isTesting || string.Equals(
|
|
||||||
Environment.GetEnvironmentVariable("CONCELIER_BYPASS_MONGO"),
|
|
||||||
"1",
|
|
||||||
StringComparison.OrdinalIgnoreCase);
|
|
||||||
|
|
||||||
if (!isTesting)
|
|
||||||
{
|
|
||||||
builder.Services.AddMongoStorage(storageOptions =>
|
|
||||||
{
|
|
||||||
storageOptions.ConnectionString = concelierOptions.Storage.Dsn;
|
|
||||||
storageOptions.DatabaseName = concelierOptions.Storage.Database;
|
|
||||||
storageOptions.CommandTimeout = TimeSpan.FromSeconds(concelierOptions.Storage.CommandTimeoutSeconds);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
// In test host we entirely bypass Mongo validation/bootstrapping; tests inject fakes.
|
|
||||||
builder.Services.RemoveAll<IMongoClient>();
|
|
||||||
builder.Services.RemoveAll<IMongoDatabase>();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add PostgreSQL storage for LNM linkset cache if configured.
|
// Add PostgreSQL storage for LNM linkset cache if configured.
|
||||||
// This provides a PostgreSQL-backed implementation of IAdvisoryLinksetStore for the read-through cache.
|
// This provides a PostgreSQL-backed implementation of IAdvisoryLinksetStore for the read-through cache.
|
||||||
@@ -511,14 +490,14 @@ app.MapGet("/.well-known/openapi", ([FromServices] OpenApiDiscoveryDocumentProvi
|
|||||||
{
|
{
|
||||||
context.Response.Headers.ETag = etag;
|
context.Response.Headers.ETag = etag;
|
||||||
context.Response.Headers.CacheControl = "public, max-age=300, immutable";
|
context.Response.Headers.CacheControl = "public, max-age=300, immutable";
|
||||||
return Results.StatusCode(StatusCodes.Status304NotModified);
|
return HttpResults.StatusCode(StatusCodes.Status304NotModified);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
context.Response.Headers.ETag = etag;
|
context.Response.Headers.ETag = etag;
|
||||||
context.Response.Headers.CacheControl = "public, max-age=300, immutable";
|
context.Response.Headers.CacheControl = "public, max-age=300, immutable";
|
||||||
return Results.Text(payload, "application/vnd.oai.openapi+json;version=3.1");
|
return HttpResults.Text(payload, "application/vnd.oai.openapi+json;version=3.1");
|
||||||
|
|
||||||
static bool Matches(string? candidate, string expected)
|
static bool Matches(string? candidate, string expected)
|
||||||
{
|
{
|
||||||
@@ -587,7 +566,7 @@ orchestratorGroup.MapPost("/registry", async (
|
|||||||
|
|
||||||
await store.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
await store.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
return Results.Accepted();
|
return HttpResults.Accepted();
|
||||||
}).WithName("UpsertOrchestratorRegistry");
|
}).WithName("UpsertOrchestratorRegistry");
|
||||||
|
|
||||||
orchestratorGroup.MapPost("/heartbeat", async (
|
orchestratorGroup.MapPost("/heartbeat", async (
|
||||||
@@ -628,7 +607,7 @@ orchestratorGroup.MapPost("/heartbeat", async (
|
|||||||
timestamp);
|
timestamp);
|
||||||
|
|
||||||
await store.AppendHeartbeatAsync(heartbeat, cancellationToken).ConfigureAwait(false);
|
await store.AppendHeartbeatAsync(heartbeat, cancellationToken).ConfigureAwait(false);
|
||||||
return Results.Accepted();
|
return HttpResults.Accepted();
|
||||||
}).WithName("RecordOrchestratorHeartbeat");
|
}).WithName("RecordOrchestratorHeartbeat");
|
||||||
|
|
||||||
orchestratorGroup.MapPost("/commands", async (
|
orchestratorGroup.MapPost("/commands", async (
|
||||||
@@ -672,7 +651,7 @@ orchestratorGroup.MapPost("/commands", async (
|
|||||||
request.ExpiresAt);
|
request.ExpiresAt);
|
||||||
|
|
||||||
await store.EnqueueCommandAsync(command, cancellationToken).ConfigureAwait(false);
|
await store.EnqueueCommandAsync(command, cancellationToken).ConfigureAwait(false);
|
||||||
return Results.Accepted();
|
return HttpResults.Accepted();
|
||||||
}).WithName("EnqueueOrchestratorCommand");
|
}).WithName("EnqueueOrchestratorCommand");
|
||||||
|
|
||||||
orchestratorGroup.MapGet("/commands", async (
|
orchestratorGroup.MapGet("/commands", async (
|
||||||
@@ -696,7 +675,7 @@ orchestratorGroup.MapGet("/commands", async (
|
|||||||
}
|
}
|
||||||
|
|
||||||
var commands = await store.GetPendingCommandsAsync(tenant, connectorId.Trim(), runId, afterSequence, cancellationToken).ConfigureAwait(false);
|
var commands = await store.GetPendingCommandsAsync(tenant, connectorId.Trim(), runId, afterSequence, cancellationToken).ConfigureAwait(false);
|
||||||
return Results.Ok(commands);
|
return HttpResults.Ok(commands);
|
||||||
}).WithName("GetOrchestratorCommands");
|
}).WithName("GetOrchestratorCommands");
|
||||||
var observationsEndpoint = app.MapGet("/concelier/observations", async (
|
var observationsEndpoint = app.MapGet("/concelier/observations", async (
|
||||||
HttpContext context,
|
HttpContext context,
|
||||||
@@ -772,7 +751,7 @@ var observationsEndpoint = app.MapGet("/concelier/observations", async (
|
|||||||
result.NextCursor,
|
result.NextCursor,
|
||||||
result.HasMore);
|
result.HasMore);
|
||||||
|
|
||||||
return Results.Ok(response);
|
return HttpResults.Ok(response);
|
||||||
}).WithName("GetConcelierObservations");
|
}).WithName("GetConcelierObservations");
|
||||||
|
|
||||||
const int DefaultLnmPageSize = 50;
|
const int DefaultLnmPageSize = 50;
|
||||||
@@ -824,7 +803,7 @@ app.MapGet("/v1/lnm/linksets", async (
|
|||||||
items.Add(ToLnmResponse(linkset, includeConflicts.GetValueOrDefault(true), includeTimeline: false, includeObservations: false, summary));
|
items.Add(ToLnmResponse(linkset, includeConflicts.GetValueOrDefault(true), includeTimeline: false, includeObservations: false, summary));
|
||||||
}
|
}
|
||||||
|
|
||||||
return Results.Ok(new LnmLinksetPage(items, resolvedPage, resolvedPageSize, result.Total));
|
return HttpResults.Ok(new LnmLinksetPage(items, resolvedPage, resolvedPageSize, result.Total));
|
||||||
}).WithName("ListLnmLinksets");
|
}).WithName("ListLnmLinksets");
|
||||||
|
|
||||||
app.MapPost("/v1/lnm/linksets/search", async (
|
app.MapPost("/v1/lnm/linksets/search", async (
|
||||||
@@ -874,7 +853,7 @@ app.MapPost("/v1/lnm/linksets/search", async (
|
|||||||
summary));
|
summary));
|
||||||
}
|
}
|
||||||
|
|
||||||
return Results.Ok(new LnmLinksetPage(items, resolvedPage, resolvedPageSize, result.Total));
|
return HttpResults.Ok(new LnmLinksetPage(items, resolvedPage, resolvedPageSize, result.Total));
|
||||||
}).WithName("SearchLnmLinksets");
|
}).WithName("SearchLnmLinksets");
|
||||||
|
|
||||||
app.MapGet("/v1/lnm/linksets/{advisoryId}", async (
|
app.MapGet("/v1/lnm/linksets/{advisoryId}", async (
|
||||||
@@ -960,7 +939,7 @@ app.MapGet("/v1/lnm/linksets/{advisoryId}", async (
|
|||||||
var summary = await BuildObservationSummaryAsync(observationQueryService, tenant!, linkset, cancellationToken).ConfigureAwait(false);
|
var summary = await BuildObservationSummaryAsync(observationQueryService, tenant!, linkset, cancellationToken).ConfigureAwait(false);
|
||||||
var response = ToLnmResponse(linkset, includeConflicts, includeTimeline: false, includeObservations: includeObservations, summary, cached: fromCache);
|
var response = ToLnmResponse(linkset, includeConflicts, includeTimeline: false, includeObservations: includeObservations, summary, cached: fromCache);
|
||||||
|
|
||||||
return Results.Ok(response);
|
return HttpResults.Ok(response);
|
||||||
}).WithName("GetLnmLinkset");
|
}).WithName("GetLnmLinkset");
|
||||||
|
|
||||||
app.MapGet("/linksets", async (
|
app.MapGet("/linksets", async (
|
||||||
@@ -999,7 +978,7 @@ app.MapGet("/linksets", async (
|
|||||||
nextCursor = result.NextCursor
|
nextCursor = result.NextCursor
|
||||||
};
|
};
|
||||||
|
|
||||||
return Results.Ok(payload);
|
return HttpResults.Ok(payload);
|
||||||
}).WithName("ListLinksetsLegacy");
|
}).WithName("ListLinksetsLegacy");
|
||||||
|
|
||||||
if (authorityConfigured)
|
if (authorityConfigured)
|
||||||
@@ -1334,20 +1313,20 @@ var advisoryObservationsEndpoint = app.MapGet("/advisories/observations", async
|
|||||||
var query = context.Request.Query;
|
var query = context.Request.Query;
|
||||||
|
|
||||||
// Parse query parameters
|
// Parse query parameters
|
||||||
var aliases = query.TryGetValue("alias", out var aliasValues)
|
string[]? aliases = query.TryGetValue("alias", out var aliasValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(aliasValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(aliasValues).ToArray()
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
var purls = query.TryGetValue("purl", out var purlValues)
|
string[]? purls = query.TryGetValue("purl", out var purlValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(purlValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(purlValues).ToArray()
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
var cpes = query.TryGetValue("cpe", out var cpeValues)
|
string[]? cpes = query.TryGetValue("cpe", out var cpeValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(cpeValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(cpeValues).ToArray()
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
var observationIds = query.TryGetValue("id", out var idValues)
|
string[]? observationIds = query.TryGetValue("id", out var idValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(idValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(idValues).ToArray()
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
int? limit = null;
|
int? limit = null;
|
||||||
@@ -1428,14 +1407,14 @@ var advisoryLinksetsEndpoint = app.MapGet("/advisories/linksets", async (
|
|||||||
var query = context.Request.Query;
|
var query = context.Request.Query;
|
||||||
|
|
||||||
// Parse advisory IDs (alias values like CVE-*, GHSA-*)
|
// Parse advisory IDs (alias values like CVE-*, GHSA-*)
|
||||||
var advisoryIds = query.TryGetValue("advisoryId", out var advisoryIdValues)
|
string[]? advisoryIds = query.TryGetValue("advisoryId", out var advisoryIdValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(advisoryIdValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(advisoryIdValues).ToArray()
|
||||||
: (query.TryGetValue("alias", out var aliasValues)
|
: (query.TryGetValue("alias", out var aliasValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(aliasValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(aliasValues).ToArray()
|
||||||
: null);
|
: null);
|
||||||
|
|
||||||
var sources = query.TryGetValue("source", out var sourceValues)
|
string[]? sources = query.TryGetValue("source", out var sourceValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(sourceValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(sourceValues).ToArray()
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
int? limit = null;
|
int? limit = null;
|
||||||
@@ -1496,7 +1475,8 @@ var advisoryLinksetsEndpoint = app.MapGet("/advisories/linksets", async (
|
|||||||
linkset.Normalized.Purls,
|
linkset.Normalized.Purls,
|
||||||
linkset.Normalized.Cpes,
|
linkset.Normalized.Cpes,
|
||||||
linkset.Normalized.Versions,
|
linkset.Normalized.Versions,
|
||||||
null) // Ranges serialized differently
|
null, // Ranges serialized differently
|
||||||
|
null) // Severities not yet populated
|
||||||
: null,
|
: null,
|
||||||
false, // Not from cache
|
false, // Not from cache
|
||||||
Array.Empty<string>(),
|
Array.Empty<string>(),
|
||||||
@@ -1533,12 +1513,12 @@ var advisoryLinksetsExportEndpoint = app.MapGet("/advisories/linksets/export", a
|
|||||||
|
|
||||||
var query = context.Request.Query;
|
var query = context.Request.Query;
|
||||||
|
|
||||||
var advisoryIds = query.TryGetValue("advisoryId", out var advisoryIdValues)
|
string[]? advisoryIds = query.TryGetValue("advisoryId", out var advisoryIdValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(advisoryIdValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(advisoryIdValues).ToArray()
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
var sources = query.TryGetValue("source", out var sourceValues)
|
string[]? sources = query.TryGetValue("source", out var sourceValues)
|
||||||
? AdvisoryRawRequestMapper.NormalizeStrings(sourceValues)
|
? AdvisoryRawRequestMapper.NormalizeStrings(sourceValues).ToArray()
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
var options = new AdvisoryLinksetQueryOptions(tenant, advisoryIds, sources, 1000, null);
|
var options = new AdvisoryLinksetQueryOptions(tenant, advisoryIds, sources, 1000, null);
|
||||||
@@ -1634,7 +1614,7 @@ app.MapPost("/internal/events/observations/publish", async (
|
|||||||
published++;
|
published++;
|
||||||
}
|
}
|
||||||
|
|
||||||
return Results.Ok(new { tenant, published, requestedCount = request.ObservationIds.Count, timestamp = timeProvider.GetUtcNow() });
|
return HttpResults.Ok(new { tenant, published, requestedCount = request.ObservationIds.Count, timestamp = timeProvider.GetUtcNow() });
|
||||||
}).WithName("PublishObservationEvents");
|
}).WithName("PublishObservationEvents");
|
||||||
|
|
||||||
// Internal endpoint for publishing linkset events to NATS/Redis.
|
// Internal endpoint for publishing linkset events to NATS/Redis.
|
||||||
@@ -1681,7 +1661,7 @@ app.MapPost("/internal/events/linksets/publish", async (
|
|||||||
published++;
|
published++;
|
||||||
}
|
}
|
||||||
|
|
||||||
return Results.Ok(new { tenant, published, requestedCount = request.AdvisoryIds.Count, hasMore = result.HasMore, timestamp = timeProvider.GetUtcNow() });
|
return HttpResults.Ok(new { tenant, published, requestedCount = request.AdvisoryIds.Count, hasMore = result.HasMore, timestamp = timeProvider.GetUtcNow() });
|
||||||
}).WithName("PublishLinksetEvents");
|
}).WithName("PublishLinksetEvents");
|
||||||
|
|
||||||
var advisoryEvidenceEndpoint = app.MapGet("/vuln/evidence/advisories/{advisoryKey}", async (
|
var advisoryEvidenceEndpoint = app.MapGet("/vuln/evidence/advisories/{advisoryKey}", async (
|
||||||
@@ -1782,7 +1762,7 @@ var attestationVerifyEndpoint = app.MapPost("/internal/attestations/verify", asy
|
|||||||
request.PipelineVersion ?? evidenceOptions.PipelineVersion ?? "git:unknown"),
|
request.PipelineVersion ?? evidenceOptions.PipelineVersion ?? "git:unknown"),
|
||||||
cancellationToken).ConfigureAwait(false);
|
cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
return Results.Json(claims);
|
return HttpResults.Json(claims);
|
||||||
}
|
}
|
||||||
catch (Exception ex)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
@@ -1834,7 +1814,7 @@ var evidenceSnapshotEndpoint = app.MapGet("/obs/evidence/advisories/{advisoryKey
|
|||||||
TransparencyPath: File.Exists(transparencyPath) ? transparencyPath : null,
|
TransparencyPath: File.Exists(transparencyPath) ? transparencyPath : null,
|
||||||
PipelineVersion: options.PipelineVersion);
|
PipelineVersion: options.PipelineVersion);
|
||||||
|
|
||||||
return Results.Json(response);
|
return HttpResults.Json(response);
|
||||||
});
|
});
|
||||||
if (authorityConfigured)
|
if (authorityConfigured)
|
||||||
{
|
{
|
||||||
@@ -1898,7 +1878,7 @@ var evidenceAttestationEndpoint = app.MapGet("/obs/attestations/advisories/{advi
|
|||||||
TransparencyPath: File.Exists(transparencyPath) ? transparencyPath : null,
|
TransparencyPath: File.Exists(transparencyPath) ? transparencyPath : null,
|
||||||
PipelineVersion: options.PipelineVersion);
|
PipelineVersion: options.PipelineVersion);
|
||||||
|
|
||||||
return Results.Json(response);
|
return HttpResults.Json(response);
|
||||||
});
|
});
|
||||||
if (authorityConfigured)
|
if (authorityConfigured)
|
||||||
{
|
{
|
||||||
@@ -1927,7 +1907,7 @@ var incidentGetEndpoint = app.MapGet("/obs/incidents/advisories/{advisoryKey}",
|
|||||||
return Problem(context, "Incident not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, "No incident marker present.");
|
return Problem(context, "Incident not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, "No incident marker present.");
|
||||||
}
|
}
|
||||||
|
|
||||||
return Results.Json(status);
|
return HttpResults.Json(status);
|
||||||
});
|
});
|
||||||
if (authorityConfigured)
|
if (authorityConfigured)
|
||||||
{
|
{
|
||||||
@@ -1967,7 +1947,7 @@ var incidentUpsertEndpoint = app.MapPost("/obs/incidents/advisories/{advisoryKey
|
|||||||
cancellationToken).ConfigureAwait(false);
|
cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
var status = await IncidentFileStore.ReadAsync(evidenceOptions, tenant!, advisoryKey, timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
|
var status = await IncidentFileStore.ReadAsync(evidenceOptions, tenant!, advisoryKey, timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
|
||||||
return Results.Json(status);
|
return HttpResults.Json(status);
|
||||||
});
|
});
|
||||||
if (authorityConfigured)
|
if (authorityConfigured)
|
||||||
{
|
{
|
||||||
@@ -1989,7 +1969,7 @@ var incidentDeleteEndpoint = app.MapDelete("/obs/incidents/advisories/{advisoryK
|
|||||||
|
|
||||||
var evidenceOptions = concelierOptions.Value.Evidence ?? new ConcelierOptions.EvidenceBundleOptions();
|
var evidenceOptions = concelierOptions.Value.Evidence ?? new ConcelierOptions.EvidenceBundleOptions();
|
||||||
await IncidentFileStore.DeleteAsync(evidenceOptions, tenant!, advisoryKey, cancellationToken).ConfigureAwait(false);
|
await IncidentFileStore.DeleteAsync(evidenceOptions, tenant!, advisoryKey, cancellationToken).ConfigureAwait(false);
|
||||||
return Results.NoContent();
|
return HttpResults.NoContent();
|
||||||
});
|
});
|
||||||
if (authorityConfigured)
|
if (authorityConfigured)
|
||||||
{
|
{
|
||||||
@@ -2224,7 +2204,7 @@ var advisorySummaryEndpoint = app.MapGet("/advisories/summary", async (
|
|||||||
context.Response.Headers["X-Stella-Cache-Ttl"] = "0";
|
context.Response.Headers["X-Stella-Cache-Ttl"] = "0";
|
||||||
|
|
||||||
var response = AdvisorySummaryMapper.ToResponse(normalizedTenant, orderedItems, nextCursor, sortKey);
|
var response = AdvisorySummaryMapper.ToResponse(normalizedTenant, orderedItems, nextCursor, sortKey);
|
||||||
return Results.Ok(response);
|
return HttpResults.Ok(response);
|
||||||
}).WithName("GetAdvisoriesSummary");
|
}).WithName("GetAdvisoriesSummary");
|
||||||
|
|
||||||
// Evidence batch (component-centric) endpoint for graph overlays / evidence exports.
|
// Evidence batch (component-centric) endpoint for graph overlays / evidence exports.
|
||||||
@@ -2292,7 +2272,7 @@ app.MapPost("/v1/evidence/batch", async (
|
|||||||
responses.Add(responseItem);
|
responses.Add(responseItem);
|
||||||
}
|
}
|
||||||
|
|
||||||
return Results.Ok(new EvidenceBatchResponse(responses));
|
return HttpResults.Ok(new EvidenceBatchResponse(responses));
|
||||||
}).WithName("GetEvidenceBatch");
|
}).WithName("GetEvidenceBatch");
|
||||||
|
|
||||||
if (authorityConfigured)
|
if (authorityConfigured)
|
||||||
@@ -2384,6 +2364,7 @@ if (authorityConfigured)
|
|||||||
|
|
||||||
app.MapGet("/concelier/advisories/{vulnerabilityKey}/replay", async (
|
app.MapGet("/concelier/advisories/{vulnerabilityKey}/replay", async (
|
||||||
string vulnerabilityKey,
|
string vulnerabilityKey,
|
||||||
|
HttpContext context,
|
||||||
DateTimeOffset? asOf,
|
DateTimeOffset? asOf,
|
||||||
[FromServices] IAdvisoryEventLog eventLog,
|
[FromServices] IAdvisoryEventLog eventLog,
|
||||||
CancellationToken cancellationToken) =>
|
CancellationToken cancellationToken) =>
|
||||||
@@ -2468,7 +2449,7 @@ var statementProvenanceEndpoint = app.MapPost("/events/statements/{statementId:g
|
|||||||
return Problem(context, "Statement not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, ex.Message);
|
return Problem(context, "Statement not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, ex.Message);
|
||||||
}
|
}
|
||||||
|
|
||||||
return Results.Accepted($"/events/statements/{statementId}");
|
return HttpResults.Accepted($"/events/statements/{statementId}");
|
||||||
});
|
});
|
||||||
|
|
||||||
if (authorityConfigured)
|
if (authorityConfigured)
|
||||||
@@ -2509,7 +2490,7 @@ app.UseExceptionHandler(errorApp =>
|
|||||||
["traceId"] = Activity.Current?.TraceId.ToString() ?? context.TraceIdentifier,
|
["traceId"] = Activity.Current?.TraceId.ToString() ?? context.TraceIdentifier,
|
||||||
};
|
};
|
||||||
|
|
||||||
var problem = Results.Problem(
|
var problem = HttpResults.Problem(
|
||||||
detail: error?.Message,
|
detail: error?.Message,
|
||||||
instance: context.Request.Path,
|
instance: context.Request.Path,
|
||||||
statusCode: StatusCodes.Status500InternalServerError,
|
statusCode: StatusCodes.Status500InternalServerError,
|
||||||
@@ -2752,7 +2733,7 @@ IReadOnlyList<LnmLinksetTimeline> BuildTimeline(AdvisoryLinkset linkset, Linkset
|
|||||||
IResult JsonResult<T>(T value, int? statusCode = null)
|
IResult JsonResult<T>(T value, int? statusCode = null)
|
||||||
{
|
{
|
||||||
var payload = JsonSerializer.Serialize(value, JsonOptions);
|
var payload = JsonSerializer.Serialize(value, JsonOptions);
|
||||||
return Results.Content(payload, "application/json", Encoding.UTF8, statusCode);
|
return HttpResults.Content(payload, "application/json", Encoding.UTF8, statusCode);
|
||||||
}
|
}
|
||||||
|
|
||||||
IResult Problem(HttpContext context, string title, int statusCode, string type, string? detail = null, IDictionary<string, object?>? extensions = null, string? errorCode = null)
|
IResult Problem(HttpContext context, string title, int statusCode, string type, string? detail = null, IDictionary<string, object?>? extensions = null, string? errorCode = null)
|
||||||
@@ -2789,7 +2770,7 @@ IResult Problem(HttpContext context, string title, int statusCode, string type,
|
|||||||
}
|
}
|
||||||
|
|
||||||
var payload = JsonSerializer.Serialize(problemDetails, JsonOptions);
|
var payload = JsonSerializer.Serialize(problemDetails, JsonOptions);
|
||||||
return Results.Content(payload, "application/problem+json", Encoding.UTF8, statusCode);
|
return HttpResults.Content(payload, "application/problem+json", Encoding.UTF8, statusCode);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool TryResolveTenant(HttpContext context, bool requireHeader, out string tenant, out IResult? error)
|
bool TryResolveTenant(HttpContext context, bool requireHeader, out string tenant, out IResult? error)
|
||||||
@@ -2833,14 +2814,14 @@ IResult? EnsureTenantAuthorized(HttpContext context, string tenant)
|
|||||||
|
|
||||||
if (enforceTenantAllowlist && !requiredTenants.Contains(tenant))
|
if (enforceTenantAllowlist && !requiredTenants.Contains(tenant))
|
||||||
{
|
{
|
||||||
return Results.Forbid();
|
return HttpResults.Forbid();
|
||||||
}
|
}
|
||||||
|
|
||||||
var principal = context.User;
|
var principal = context.User;
|
||||||
|
|
||||||
if (enforceAuthority && (principal?.Identity?.IsAuthenticated != true))
|
if (enforceAuthority && (principal?.Identity?.IsAuthenticated != true))
|
||||||
{
|
{
|
||||||
return Results.Unauthorized();
|
return HttpResults.Unauthorized();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (principal?.Identity?.IsAuthenticated == true)
|
if (principal?.Identity?.IsAuthenticated == true)
|
||||||
@@ -2848,18 +2829,18 @@ IResult? EnsureTenantAuthorized(HttpContext context, string tenant)
|
|||||||
var tenantClaim = principal.FindFirstValue(StellaOpsClaimTypes.Tenant);
|
var tenantClaim = principal.FindFirstValue(StellaOpsClaimTypes.Tenant);
|
||||||
if (string.IsNullOrWhiteSpace(tenantClaim))
|
if (string.IsNullOrWhiteSpace(tenantClaim))
|
||||||
{
|
{
|
||||||
return Results.Forbid();
|
return HttpResults.Forbid();
|
||||||
}
|
}
|
||||||
|
|
||||||
var normalizedClaim = tenantClaim.Trim().ToLowerInvariant();
|
var normalizedClaim = tenantClaim.Trim().ToLowerInvariant();
|
||||||
if (!string.Equals(normalizedClaim, tenant, StringComparison.Ordinal))
|
if (!string.Equals(normalizedClaim, tenant, StringComparison.Ordinal))
|
||||||
{
|
{
|
||||||
return Results.Forbid();
|
return HttpResults.Forbid();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (enforceTenantAllowlist && !requiredTenants.Contains(normalizedClaim))
|
if (enforceTenantAllowlist && !requiredTenants.Contains(normalizedClaim))
|
||||||
{
|
{
|
||||||
return Results.Forbid();
|
return HttpResults.Forbid();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -3319,62 +3300,26 @@ app.MapGet("/health", ([FromServices] IOptions<ConcelierOptions> opts, [FromServ
|
|||||||
return JsonResult(response);
|
return JsonResult(response);
|
||||||
});
|
});
|
||||||
|
|
||||||
app.MapGet("/ready", async ([FromServices] IMongoDatabase database, [FromServices] StellaOps.Concelier.WebService.Diagnostics.ServiceStatus status, HttpContext context, CancellationToken cancellationToken) =>
|
app.MapGet("/ready", ([FromServices] StellaOps.Concelier.WebService.Diagnostics.ServiceStatus status, HttpContext context) =>
|
||||||
{
|
{
|
||||||
ApplyNoCache(context.Response);
|
ApplyNoCache(context.Response);
|
||||||
|
|
||||||
var stopwatch = Stopwatch.StartNew();
|
var snapshot = status.CreateSnapshot();
|
||||||
try
|
var uptimeSeconds = Math.Max((snapshot.CapturedAt - snapshot.StartedAt).TotalSeconds, 0d);
|
||||||
{
|
|
||||||
await database.RunCommandAsync((Command<BsonDocument>)"{ ping: 1 }", cancellationToken: cancellationToken).ConfigureAwait(false);
|
|
||||||
stopwatch.Stop();
|
|
||||||
status.RecordMongoCheck(success: true, latency: stopwatch.Elapsed, error: null);
|
|
||||||
|
|
||||||
var snapshot = status.CreateSnapshot();
|
var mongo = new MongoReadyHealth(
|
||||||
var uptimeSeconds = Math.Max((snapshot.CapturedAt - snapshot.StartedAt).TotalSeconds, 0d);
|
Status: "bypassed",
|
||||||
|
LatencyMs: null,
|
||||||
|
CheckedAt: snapshot.LastReadyCheckAt,
|
||||||
|
Error: "mongo disabled");
|
||||||
|
|
||||||
var mongo = new MongoReadyHealth(
|
var response = new ReadyDocument(
|
||||||
Status: "ready",
|
Status: "ready",
|
||||||
LatencyMs: snapshot.LastMongoLatency?.TotalMilliseconds,
|
StartedAt: snapshot.StartedAt,
|
||||||
CheckedAt: snapshot.LastReadyCheckAt,
|
UptimeSeconds: uptimeSeconds,
|
||||||
Error: null);
|
Mongo: mongo);
|
||||||
|
|
||||||
var response = new ReadyDocument(
|
return JsonResult(response);
|
||||||
Status: "ready",
|
|
||||||
StartedAt: snapshot.StartedAt,
|
|
||||||
UptimeSeconds: uptimeSeconds,
|
|
||||||
Mongo: mongo);
|
|
||||||
|
|
||||||
return JsonResult(response);
|
|
||||||
}
|
|
||||||
catch (Exception ex)
|
|
||||||
{
|
|
||||||
stopwatch.Stop();
|
|
||||||
status.RecordMongoCheck(success: false, latency: stopwatch.Elapsed, error: ex.Message);
|
|
||||||
|
|
||||||
var snapshot = status.CreateSnapshot();
|
|
||||||
var uptimeSeconds = Math.Max((snapshot.CapturedAt - snapshot.StartedAt).TotalSeconds, 0d);
|
|
||||||
|
|
||||||
var mongo = new MongoReadyHealth(
|
|
||||||
Status: "unready",
|
|
||||||
LatencyMs: snapshot.LastMongoLatency?.TotalMilliseconds,
|
|
||||||
CheckedAt: snapshot.LastReadyCheckAt,
|
|
||||||
Error: snapshot.LastMongoError ?? ex.Message);
|
|
||||||
|
|
||||||
var response = new ReadyDocument(
|
|
||||||
Status: "unready",
|
|
||||||
StartedAt: snapshot.StartedAt,
|
|
||||||
UptimeSeconds: uptimeSeconds,
|
|
||||||
Mongo: mongo);
|
|
||||||
|
|
||||||
var extensions = new Dictionary<string, object?>(StringComparer.Ordinal)
|
|
||||||
{
|
|
||||||
["mongoLatencyMs"] = snapshot.LastMongoLatency?.TotalMilliseconds,
|
|
||||||
["mongoError"] = snapshot.LastMongoError ?? ex.Message,
|
|
||||||
};
|
|
||||||
|
|
||||||
return Problem(context, "Mongo unavailable", StatusCodes.Status503ServiceUnavailable, ProblemTypes.ServiceUnavailable, snapshot.LastMongoError ?? ex.Message, extensions);
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
app.MapGet("/diagnostics/aliases/{seed}", async (string seed, [FromServices] AliasGraphResolver resolver, HttpContext context, CancellationToken cancellationToken) =>
|
app.MapGet("/diagnostics/aliases/{seed}", async (string seed, [FromServices] AliasGraphResolver resolver, HttpContext context, CancellationToken cancellationToken) =>
|
||||||
@@ -3553,7 +3498,7 @@ var triggerJobEndpoint = app.MapPost("/jobs/{*jobKind}", async (string jobKind,
|
|||||||
JobMetrics.TriggerCounter.Add(1, tags);
|
JobMetrics.TriggerCounter.Add(1, tags);
|
||||||
if (result.Run is null)
|
if (result.Run is null)
|
||||||
{
|
{
|
||||||
return Results.StatusCode(StatusCodes.Status202Accepted);
|
return HttpResults.StatusCode(StatusCodes.Status202Accepted);
|
||||||
}
|
}
|
||||||
|
|
||||||
var acceptedRun = JobRunResponse.FromSnapshot(result.Run);
|
var acceptedRun = JobRunResponse.FromSnapshot(result.Run);
|
||||||
@@ -3638,7 +3583,7 @@ var concelierHealthEndpoint = app.MapGet("/obs/concelier/health", (
|
|||||||
Window: "5m",
|
Window: "5m",
|
||||||
UpdatedAt: now.ToString("O", CultureInfo.InvariantCulture));
|
UpdatedAt: now.ToString("O", CultureInfo.InvariantCulture));
|
||||||
|
|
||||||
return Results.Ok(payload);
|
return HttpResults.Ok(payload);
|
||||||
});
|
});
|
||||||
|
|
||||||
var concelierTimelineEndpoint = app.MapGet("/obs/concelier/timeline", async (
|
var concelierTimelineEndpoint = app.MapGet("/obs/concelier/timeline", async (
|
||||||
@@ -3702,7 +3647,7 @@ var concelierTimelineEndpoint = app.MapGet("/obs/concelier/timeline", async (
|
|||||||
context.Response.Headers["X-Next-Cursor"] = nextCursor.ToString(CultureInfo.InvariantCulture);
|
context.Response.Headers["X-Next-Cursor"] = nextCursor.ToString(CultureInfo.InvariantCulture);
|
||||||
logger.LogInformation("obs timeline emitted {Count} events for tenant {Tenant} starting at {StartId} next {Next}", events.Count, tenant, startId, nextCursor);
|
logger.LogInformation("obs timeline emitted {Count} events for tenant {Tenant} starting at {StartId} next {Next}", events.Count, tenant, startId, nextCursor);
|
||||||
|
|
||||||
return Results.Empty;
|
return HttpResults.Empty;
|
||||||
});
|
});
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -3774,7 +3719,7 @@ app.MapGet("/v1/signals/symbols", async (
|
|||||||
|
|
||||||
var result = await symbolProvider.QueryAsync(options, cancellationToken);
|
var result = await symbolProvider.QueryAsync(options, cancellationToken);
|
||||||
|
|
||||||
return Results.Ok(new SignalsSymbolQueryResponse(
|
return HttpResults.Ok(new SignalsSymbolQueryResponse(
|
||||||
Symbols: result.Symbols.Select(s => ToSymbolResponse(s)).ToList(),
|
Symbols: result.Symbols.Select(s => ToSymbolResponse(s)).ToList(),
|
||||||
TotalCount: result.TotalCount,
|
TotalCount: result.TotalCount,
|
||||||
HasMore: result.HasMore,
|
HasMore: result.HasMore,
|
||||||
@@ -3807,7 +3752,7 @@ app.MapGet("/v1/signals/symbols/advisory/{advisoryId}", async (
|
|||||||
|
|
||||||
var symbolSet = await symbolProvider.GetByAdvisoryAsync(tenant!, advisoryId.Trim(), cancellationToken);
|
var symbolSet = await symbolProvider.GetByAdvisoryAsync(tenant!, advisoryId.Trim(), cancellationToken);
|
||||||
|
|
||||||
return Results.Ok(ToSymbolSetResponse(symbolSet));
|
return HttpResults.Ok(ToSymbolSetResponse(symbolSet));
|
||||||
}).WithName("GetAffectedSymbolsByAdvisory");
|
}).WithName("GetAffectedSymbolsByAdvisory");
|
||||||
|
|
||||||
app.MapGet("/v1/signals/symbols/package/{*purl}", async (
|
app.MapGet("/v1/signals/symbols/package/{*purl}", async (
|
||||||
@@ -3831,7 +3776,7 @@ app.MapGet("/v1/signals/symbols/package/{*purl}", async (
|
|||||||
|
|
||||||
if (string.IsNullOrWhiteSpace(purl))
|
if (string.IsNullOrWhiteSpace(purl))
|
||||||
{
|
{
|
||||||
return Problem(
|
return HttpResults.Problem(
|
||||||
statusCode: StatusCodes.Status400BadRequest,
|
statusCode: StatusCodes.Status400BadRequest,
|
||||||
title: "Package URL required",
|
title: "Package URL required",
|
||||||
detail: "The purl parameter is required.",
|
detail: "The purl parameter is required.",
|
||||||
@@ -3840,7 +3785,7 @@ app.MapGet("/v1/signals/symbols/package/{*purl}", async (
|
|||||||
|
|
||||||
var symbolSet = await symbolProvider.GetByPackageAsync(tenant!, purl.Trim(), cancellationToken);
|
var symbolSet = await symbolProvider.GetByPackageAsync(tenant!, purl.Trim(), cancellationToken);
|
||||||
|
|
||||||
return Results.Ok(ToSymbolSetResponse(symbolSet));
|
return HttpResults.Ok(ToSymbolSetResponse(symbolSet));
|
||||||
}).WithName("GetAffectedSymbolsByPackage");
|
}).WithName("GetAffectedSymbolsByPackage");
|
||||||
|
|
||||||
app.MapPost("/v1/signals/symbols/batch", async (
|
app.MapPost("/v1/signals/symbols/batch", async (
|
||||||
@@ -3864,7 +3809,7 @@ app.MapPost("/v1/signals/symbols/batch", async (
|
|||||||
|
|
||||||
if (request.AdvisoryIds is not { Count: > 0 })
|
if (request.AdvisoryIds is not { Count: > 0 })
|
||||||
{
|
{
|
||||||
return Problem(
|
return HttpResults.Problem(
|
||||||
statusCode: StatusCodes.Status400BadRequest,
|
statusCode: StatusCodes.Status400BadRequest,
|
||||||
title: "Advisory IDs required",
|
title: "Advisory IDs required",
|
||||||
detail: "At least one advisoryId is required in the batch request.",
|
detail: "At least one advisoryId is required in the batch request.",
|
||||||
@@ -3873,7 +3818,7 @@ app.MapPost("/v1/signals/symbols/batch", async (
|
|||||||
|
|
||||||
if (request.AdvisoryIds.Count > 100)
|
if (request.AdvisoryIds.Count > 100)
|
||||||
{
|
{
|
||||||
return Problem(
|
return HttpResults.Problem(
|
||||||
statusCode: StatusCodes.Status400BadRequest,
|
statusCode: StatusCodes.Status400BadRequest,
|
||||||
title: "Batch size exceeded",
|
title: "Batch size exceeded",
|
||||||
detail: "Maximum batch size is 100 advisory IDs.",
|
detail: "Maximum batch size is 100 advisory IDs.",
|
||||||
@@ -3887,7 +3832,7 @@ app.MapPost("/v1/signals/symbols/batch", async (
|
|||||||
kvp => kvp.Key,
|
kvp => kvp.Key,
|
||||||
kvp => ToSymbolSetResponse(kvp.Value)));
|
kvp => ToSymbolSetResponse(kvp.Value)));
|
||||||
|
|
||||||
return Results.Ok(response);
|
return HttpResults.Ok(response);
|
||||||
}).WithName("GetAffectedSymbolsBatch");
|
}).WithName("GetAffectedSymbolsBatch");
|
||||||
|
|
||||||
app.MapGet("/v1/signals/symbols/exists/{advisoryId}", async (
|
app.MapGet("/v1/signals/symbols/exists/{advisoryId}", async (
|
||||||
@@ -3916,7 +3861,7 @@ app.MapGet("/v1/signals/symbols/exists/{advisoryId}", async (
|
|||||||
|
|
||||||
var exists = await symbolProvider.HasSymbolsAsync(tenant!, advisoryId.Trim(), cancellationToken);
|
var exists = await symbolProvider.HasSymbolsAsync(tenant!, advisoryId.Trim(), cancellationToken);
|
||||||
|
|
||||||
return Results.Ok(new SignalsSymbolExistsResponse(Exists: exists, AdvisoryId: advisoryId.Trim()));
|
return HttpResults.Ok(new SignalsSymbolExistsResponse(Exists: exists, AdvisoryId: advisoryId.Trim()));
|
||||||
}).WithName("CheckAffectedSymbolsExist");
|
}).WithName("CheckAffectedSymbolsExist");
|
||||||
|
|
||||||
await app.RunAsync();
|
await app.RunAsync();
|
||||||
@@ -4076,41 +4021,7 @@ static SignalsSymbolSetResponse ToSymbolSetResponse(AffectedSymbolSet symbolSet)
|
|||||||
|
|
||||||
static async Task InitializeMongoAsync(WebApplication app)
|
static async Task InitializeMongoAsync(WebApplication app)
|
||||||
{
|
{
|
||||||
// Skip Mongo initialization in testing/bypass mode.
|
await Task.CompletedTask;
|
||||||
var isTesting = string.Equals(
|
|
||||||
Environment.GetEnvironmentVariable("DOTNET_ENVIRONMENT"),
|
|
||||||
"Testing",
|
|
||||||
StringComparison.OrdinalIgnoreCase);
|
|
||||||
var bypass = string.Equals(
|
|
||||||
Environment.GetEnvironmentVariable("CONCELIER_BYPASS_MONGO"),
|
|
||||||
"1",
|
|
||||||
StringComparison.OrdinalIgnoreCase);
|
|
||||||
if (isTesting || bypass)
|
|
||||||
{
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await using var scope = app.Services.CreateAsyncScope();
|
|
||||||
var bootstrapper = scope.ServiceProvider.GetRequiredService<MongoBootstrapper>();
|
|
||||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("MongoBootstrapper");
|
|
||||||
var status = scope.ServiceProvider.GetRequiredService<StellaOps.Concelier.WebService.Diagnostics.ServiceStatus>();
|
|
||||||
|
|
||||||
var stopwatch = Stopwatch.StartNew();
|
|
||||||
|
|
||||||
try
|
|
||||||
{
|
|
||||||
await bootstrapper.InitializeAsync(app.Lifetime.ApplicationStopping).ConfigureAwait(false);
|
|
||||||
stopwatch.Stop();
|
|
||||||
status.MarkBootstrapCompleted(stopwatch.Elapsed);
|
|
||||||
logger.LogInformation("Mongo bootstrap completed in {ElapsedMs} ms", stopwatch.Elapsed.TotalMilliseconds);
|
|
||||||
}
|
|
||||||
catch (Exception ex)
|
|
||||||
{
|
|
||||||
stopwatch.Stop();
|
|
||||||
status.RecordMongoCheck(success: false, latency: stopwatch.Elapsed, error: ex.Message);
|
|
||||||
logger.LogCritical(ex, "Mongo bootstrap failed after {ElapsedMs} ms", stopwatch.Elapsed.TotalMilliseconds);
|
|
||||||
throw;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,6 +14,7 @@
|
|||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="4.9.2" PrivateAssets="all" />
|
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="4.9.2" PrivateAssets="all" />
|
||||||
<PackageReference Include="Microsoft.CodeAnalysis.Analyzers" Version="3.11.0" PrivateAssets="all" />
|
<PackageReference Include="Microsoft.CodeAnalysis.Analyzers" Version="3.11.0" PrivateAssets="all" />
|
||||||
|
<PackageReference Include="NETStandard.Library" Version="2.0.3" PrivateAssets="all" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
</Project>
|
</Project>
|
||||||
|
|||||||
@@ -18,8 +18,8 @@ using StellaOps.Concelier.Connector.Common.Fetch;
|
|||||||
using StellaOps.Concelier.Connector.Common.Html;
|
using StellaOps.Concelier.Connector.Common.Html;
|
||||||
using StellaOps.Concelier.Connector.Common;
|
using StellaOps.Concelier.Connector.Common;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Acsc.Internal;
|
namespace StellaOps.Concelier.Connector.Acsc.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -2,8 +2,8 @@ using System.Security.Cryptography;
|
|||||||
using System.Text;
|
using System.Text;
|
||||||
using System.Text.RegularExpressions;
|
using System.Text.RegularExpressions;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Acsc.Internal;
|
namespace StellaOps.Concelier.Connector.Acsc.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Common;
|
|||||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Cccs;
|
namespace StellaOps.Concelier.Connector.Cccs;
|
||||||
@@ -145,13 +145,16 @@ public sealed class CccsConnector : IFeedConnector
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
var gridFsId = await _rawDocumentStorage.UploadAsync(
|
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||||
|
|
||||||
|
_ = await _rawDocumentStorage.UploadAsync(
|
||||||
SourceName,
|
SourceName,
|
||||||
documentUri,
|
documentUri,
|
||||||
payload,
|
payload,
|
||||||
"application/json",
|
"application/json",
|
||||||
expiresAt: null,
|
ExpiresAt: null,
|
||||||
cancellationToken).ConfigureAwait(false);
|
cancellationToken,
|
||||||
|
recordId).ConfigureAwait(false);
|
||||||
|
|
||||||
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
|
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
|
||||||
{
|
{
|
||||||
@@ -169,7 +172,6 @@ public sealed class CccsConnector : IFeedConnector
|
|||||||
metadata["cccs.alertType"] = rawDocument.AlertType!;
|
metadata["cccs.alertType"] = rawDocument.AlertType!;
|
||||||
}
|
}
|
||||||
|
|
||||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
|
||||||
var record = new DocumentRecord(
|
var record = new DocumentRecord(
|
||||||
recordId,
|
recordId,
|
||||||
SourceName,
|
SourceName,
|
||||||
@@ -182,8 +184,9 @@ public sealed class CccsConnector : IFeedConnector
|
|||||||
Metadata: metadata,
|
Metadata: metadata,
|
||||||
Etag: null,
|
Etag: null,
|
||||||
LastModified: rawDocument.Modified ?? rawDocument.Published ?? result.LastModifiedUtc,
|
LastModified: rawDocument.Modified ?? rawDocument.Published ?? result.LastModifiedUtc,
|
||||||
PayloadId: gridFsId,
|
PayloadId: recordId,
|
||||||
ExpiresAt: null);
|
ExpiresAt: null,
|
||||||
|
Payload: payload);
|
||||||
|
|
||||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||||
pendingDocuments.Add(upserted.Id);
|
pendingDocuments.Add(upserted.Id);
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Text.RegularExpressions;
|
using System.Text.RegularExpressions;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Normalization.SemVer;
|
using StellaOps.Concelier.Normalization.SemVer;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Cccs.Internal;
|
namespace StellaOps.Concelier.Connector.Cccs.Internal;
|
||||||
|
|
||||||
internal static class CccsMapper
|
internal static class CccsMapper
|
||||||
{
|
{
|
||||||
@@ -110,149 +110,149 @@ internal static class CccsMapper
|
|||||||
.ToArray();
|
.ToArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<AffectedPackage> BuildPackages(CccsAdvisoryDto dto, DateTimeOffset recordedAt)
|
private static IReadOnlyList<AffectedPackage> BuildPackages(CccsAdvisoryDto dto, DateTimeOffset recordedAt)
|
||||||
{
|
{
|
||||||
if (dto.Products.Count == 0)
|
if (dto.Products.Count == 0)
|
||||||
{
|
{
|
||||||
return Array.Empty<AffectedPackage>();
|
return Array.Empty<AffectedPackage>();
|
||||||
}
|
}
|
||||||
|
|
||||||
var packages = new List<AffectedPackage>(dto.Products.Count);
|
var packages = new List<AffectedPackage>(dto.Products.Count);
|
||||||
for (var index = 0; index < dto.Products.Count; index++)
|
for (var index = 0; index < dto.Products.Count; index++)
|
||||||
{
|
{
|
||||||
var product = dto.Products[index];
|
var product = dto.Products[index];
|
||||||
if (string.IsNullOrWhiteSpace(product))
|
if (string.IsNullOrWhiteSpace(product))
|
||||||
{
|
{
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
var identifier = product.Trim();
|
var identifier = product.Trim();
|
||||||
var provenance = new AdvisoryProvenance(
|
var provenance = new AdvisoryProvenance(
|
||||||
CccsConnectorPlugin.SourceName,
|
CccsConnectorPlugin.SourceName,
|
||||||
"package",
|
"package",
|
||||||
identifier,
|
identifier,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
new[] { ProvenanceFieldMasks.AffectedPackages });
|
new[] { ProvenanceFieldMasks.AffectedPackages });
|
||||||
|
|
||||||
var rangeAnchor = $"cccs:{dto.SerialNumber}:{index}";
|
var rangeAnchor = $"cccs:{dto.SerialNumber}:{index}";
|
||||||
var versionRanges = BuildVersionRanges(product, rangeAnchor, recordedAt);
|
var versionRanges = BuildVersionRanges(product, rangeAnchor, recordedAt);
|
||||||
var normalizedVersions = BuildNormalizedVersions(versionRanges, rangeAnchor);
|
var normalizedVersions = BuildNormalizedVersions(versionRanges, rangeAnchor);
|
||||||
|
|
||||||
packages.Add(new AffectedPackage(
|
packages.Add(new AffectedPackage(
|
||||||
AffectedPackageTypes.Vendor,
|
AffectedPackageTypes.Vendor,
|
||||||
identifier,
|
identifier,
|
||||||
platform: null,
|
platform: null,
|
||||||
versionRanges: versionRanges,
|
versionRanges: versionRanges,
|
||||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||||
provenance: new[] { provenance },
|
provenance: new[] { provenance },
|
||||||
normalizedVersions: normalizedVersions));
|
normalizedVersions: normalizedVersions));
|
||||||
}
|
}
|
||||||
|
|
||||||
return packages.Count == 0
|
return packages.Count == 0
|
||||||
? Array.Empty<AffectedPackage>()
|
? Array.Empty<AffectedPackage>()
|
||||||
: packages
|
: packages
|
||||||
.DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
.DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||||
.OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
.OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||||
.ToArray();
|
.ToArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(string productText, string rangeAnchor, DateTimeOffset recordedAt)
|
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(string productText, string rangeAnchor, DateTimeOffset recordedAt)
|
||||||
{
|
{
|
||||||
var versionText = ExtractFirstVersionToken(productText);
|
var versionText = ExtractFirstVersionToken(productText);
|
||||||
if (string.IsNullOrWhiteSpace(versionText))
|
if (string.IsNullOrWhiteSpace(versionText))
|
||||||
{
|
{
|
||||||
return Array.Empty<AffectedVersionRange>();
|
return Array.Empty<AffectedVersionRange>();
|
||||||
}
|
}
|
||||||
|
|
||||||
var provenance = new AdvisoryProvenance(
|
var provenance = new AdvisoryProvenance(
|
||||||
CccsConnectorPlugin.SourceName,
|
CccsConnectorPlugin.SourceName,
|
||||||
"range",
|
"range",
|
||||||
rangeAnchor,
|
rangeAnchor,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
new[] { ProvenanceFieldMasks.VersionRanges });
|
new[] { ProvenanceFieldMasks.VersionRanges });
|
||||||
|
|
||||||
var vendorExtensions = new Dictionary<string, string>
|
var vendorExtensions = new Dictionary<string, string>
|
||||||
{
|
{
|
||||||
["cccs.version.raw"] = versionText!,
|
["cccs.version.raw"] = versionText!,
|
||||||
["cccs.anchor"] = rangeAnchor,
|
["cccs.anchor"] = rangeAnchor,
|
||||||
};
|
};
|
||||||
|
|
||||||
var semVerResults = SemVerRangeRuleBuilder.Build(versionText!, patchedVersion: null, provenanceNote: rangeAnchor);
|
var semVerResults = SemVerRangeRuleBuilder.Build(versionText!, patchedVersion: null, provenanceNote: rangeAnchor);
|
||||||
if (semVerResults.Count > 0)
|
if (semVerResults.Count > 0)
|
||||||
{
|
{
|
||||||
return semVerResults.Select(result =>
|
return semVerResults.Select(result =>
|
||||||
new AffectedVersionRange(
|
new AffectedVersionRange(
|
||||||
rangeKind: NormalizedVersionSchemes.SemVer,
|
rangeKind: NormalizedVersionSchemes.SemVer,
|
||||||
introducedVersion: result.Primitive.Introduced,
|
introducedVersion: result.Primitive.Introduced,
|
||||||
fixedVersion: result.Primitive.Fixed,
|
fixedVersion: result.Primitive.Fixed,
|
||||||
lastAffectedVersion: result.Primitive.LastAffected,
|
lastAffectedVersion: result.Primitive.LastAffected,
|
||||||
rangeExpression: result.Expression ?? versionText!,
|
rangeExpression: result.Expression ?? versionText!,
|
||||||
provenance: provenance,
|
provenance: provenance,
|
||||||
primitives: new RangePrimitives(
|
primitives: new RangePrimitives(
|
||||||
result.Primitive,
|
result.Primitive,
|
||||||
Nevra: null,
|
Nevra: null,
|
||||||
Evr: null,
|
Evr: null,
|
||||||
VendorExtensions: vendorExtensions)))
|
VendorExtensions: vendorExtensions)))
|
||||||
.ToArray();
|
.ToArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
var primitives = new RangePrimitives(
|
var primitives = new RangePrimitives(
|
||||||
new SemVerPrimitive(
|
new SemVerPrimitive(
|
||||||
Introduced: versionText,
|
Introduced: versionText,
|
||||||
IntroducedInclusive: true,
|
IntroducedInclusive: true,
|
||||||
Fixed: null,
|
Fixed: null,
|
||||||
FixedInclusive: false,
|
FixedInclusive: false,
|
||||||
LastAffected: null,
|
LastAffected: null,
|
||||||
LastAffectedInclusive: true,
|
LastAffectedInclusive: true,
|
||||||
ConstraintExpression: null,
|
ConstraintExpression: null,
|
||||||
ExactValue: versionText),
|
ExactValue: versionText),
|
||||||
Nevra: null,
|
Nevra: null,
|
||||||
Evr: null,
|
Evr: null,
|
||||||
VendorExtensions: vendorExtensions);
|
VendorExtensions: vendorExtensions);
|
||||||
|
|
||||||
return new[]
|
return new[]
|
||||||
{
|
{
|
||||||
new AffectedVersionRange(
|
new AffectedVersionRange(
|
||||||
rangeKind: NormalizedVersionSchemes.SemVer,
|
rangeKind: NormalizedVersionSchemes.SemVer,
|
||||||
introducedVersion: null,
|
introducedVersion: null,
|
||||||
fixedVersion: null,
|
fixedVersion: null,
|
||||||
lastAffectedVersion: null,
|
lastAffectedVersion: null,
|
||||||
rangeExpression: versionText,
|
rangeExpression: versionText,
|
||||||
provenance: provenance,
|
provenance: provenance,
|
||||||
primitives: primitives),
|
primitives: primitives),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||||
IReadOnlyList<AffectedVersionRange> ranges,
|
IReadOnlyList<AffectedVersionRange> ranges,
|
||||||
string rangeAnchor)
|
string rangeAnchor)
|
||||||
{
|
{
|
||||||
if (ranges.Count == 0)
|
if (ranges.Count == 0)
|
||||||
{
|
{
|
||||||
return Array.Empty<NormalizedVersionRule>();
|
return Array.Empty<NormalizedVersionRule>();
|
||||||
}
|
}
|
||||||
|
|
||||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||||
foreach (var range in ranges)
|
foreach (var range in ranges)
|
||||||
{
|
{
|
||||||
var rule = range.ToNormalizedVersionRule(rangeAnchor);
|
var rule = range.ToNormalizedVersionRule(rangeAnchor);
|
||||||
if (rule is not null)
|
if (rule is not null)
|
||||||
{
|
{
|
||||||
rules.Add(rule);
|
rules.Add(rule);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules.ToArray();
|
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules.ToArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static string? ExtractFirstVersionToken(string value)
|
private static string? ExtractFirstVersionToken(string value)
|
||||||
{
|
{
|
||||||
if (string.IsNullOrWhiteSpace(value))
|
if (string.IsNullOrWhiteSpace(value))
|
||||||
{
|
{
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
var match = Regex.Match(value, @"\d+(?:\.\d+){0,3}(?:[A-Za-z0-9\-_]*)?");
|
var match = Regex.Match(value, @"\d+(?:\.\d+){0,3}(?:[A-Za-z0-9\-_]*)?");
|
||||||
return match.Success ? match.Value : null;
|
return match.Success ? match.Value : null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Common.Fetch;
|
|||||||
using StellaOps.Concelier.Connector.Common.Html;
|
using StellaOps.Concelier.Connector.Common.Html;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.CertBund;
|
namespace StellaOps.Concelier.Connector.CertBund;
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ using System.Collections.Generic;
|
|||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Text.RegularExpressions;
|
using System.Text.RegularExpressions;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Normalization.SemVer;
|
using StellaOps.Concelier.Normalization.SemVer;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.CertBund.Internal;
|
namespace StellaOps.Concelier.Connector.CertBund.Internal;
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Common;
|
|||||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.CertCc;
|
namespace StellaOps.Concelier.Connector.CertCc;
|
||||||
|
|||||||
@@ -5,8 +5,8 @@ using System.Linq;
|
|||||||
using System.Net;
|
using System.Net;
|
||||||
using System.Text.RegularExpressions;
|
using System.Text.RegularExpressions;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.CertCc.Internal;
|
namespace StellaOps.Concelier.Connector.CertCc.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ using StellaOps.Concelier.Connector.Common;
|
|||||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.CertFr;
|
namespace StellaOps.Concelier.Connector.CertFr;
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.CertFr.Internal;
|
namespace StellaOps.Concelier.Connector.CertFr.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Common;
|
|||||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.CertIn;
|
namespace StellaOps.Concelier.Connector.CertIn;
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ public sealed record TimeWindowCursorState(DateTimeOffset? LastWindowStart, Date
|
|||||||
{
|
{
|
||||||
return value.BsonType switch
|
return value.BsonType switch
|
||||||
{
|
{
|
||||||
BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc),
|
BsonType.DateTime => new DateTimeOffset(value.ToUniversalTime(), TimeSpan.Zero),
|
||||||
BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(),
|
BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(),
|
||||||
_ => null,
|
_ => null,
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
using System.Collections.Concurrent;
|
using System.Collections.Concurrent;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Common.Fetch;
|
namespace StellaOps.Concelier.Connector.Common.Fetch;
|
||||||
|
|
||||||
@@ -9,6 +10,12 @@ namespace StellaOps.Concelier.Connector.Common.Fetch;
|
|||||||
public sealed class RawDocumentStorage
|
public sealed class RawDocumentStorage
|
||||||
{
|
{
|
||||||
private readonly ConcurrentDictionary<Guid, byte[]> _blobs = new();
|
private readonly ConcurrentDictionary<Guid, byte[]> _blobs = new();
|
||||||
|
private readonly IDocumentStore? _documentStore;
|
||||||
|
|
||||||
|
public RawDocumentStorage(IDocumentStore? documentStore = null)
|
||||||
|
{
|
||||||
|
_documentStore = documentStore;
|
||||||
|
}
|
||||||
|
|
||||||
public Task<Guid> UploadAsync(
|
public Task<Guid> UploadAsync(
|
||||||
string sourceName,
|
string sourceName,
|
||||||
@@ -16,7 +23,7 @@ public sealed class RawDocumentStorage
|
|||||||
byte[] content,
|
byte[] content,
|
||||||
string? contentType,
|
string? contentType,
|
||||||
CancellationToken cancellationToken)
|
CancellationToken cancellationToken)
|
||||||
=> UploadAsync(sourceName, uri, content, contentType, expiresAt: null, cancellationToken);
|
=> UploadAsync(sourceName, uri, content, contentType, ExpiresAt: null, cancellationToken);
|
||||||
|
|
||||||
public async Task<Guid> UploadAsync(
|
public async Task<Guid> UploadAsync(
|
||||||
string sourceName,
|
string sourceName,
|
||||||
@@ -39,11 +46,21 @@ public sealed class RawDocumentStorage
|
|||||||
return id;
|
return id;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Task<byte[]> DownloadAsync(Guid id, CancellationToken cancellationToken)
|
public async Task<byte[]> DownloadAsync(Guid id, CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
if (_blobs.TryGetValue(id, out var bytes))
|
if (_blobs.TryGetValue(id, out var bytes))
|
||||||
{
|
{
|
||||||
return Task.FromResult(bytes);
|
return bytes;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_documentStore is not null)
|
||||||
|
{
|
||||||
|
var record = await _documentStore.FindAsync(id, cancellationToken).ConfigureAwait(false);
|
||||||
|
if (record?.Payload is { Length: > 0 })
|
||||||
|
{
|
||||||
|
_blobs[id] = record.Payload;
|
||||||
|
return record.Payload;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new FileNotFoundException($"Blob {id} not found.");
|
throw new FileNotFoundException($"Blob {id} not found.");
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -9,160 +9,160 @@ using StellaOps.Concelier.Connector.Common.Xml;
|
|||||||
using StellaOps.Concelier.Core.Aoc;
|
using StellaOps.Concelier.Core.Aoc;
|
||||||
using StellaOps.Concelier.Core.Linksets;
|
using StellaOps.Concelier.Core.Linksets;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Common.Http;
|
namespace StellaOps.Concelier.Connector.Common.Http;
|
||||||
|
|
||||||
public static class ServiceCollectionExtensions
|
public static class ServiceCollectionExtensions
|
||||||
{
|
{
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// Registers a named HTTP client configured for a source connector with allowlisted hosts and sensible defaults.
|
/// Registers a named HTTP client configured for a source connector with allowlisted hosts and sensible defaults.
|
||||||
/// </summary>
|
/// </summary>
|
||||||
public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<SourceHttpClientOptions> configure)
|
public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<SourceHttpClientOptions> configure)
|
||||||
=> services.AddSourceHttpClient(name, (_, options) => configure(options));
|
=> services.AddSourceHttpClient(name, (_, options) => configure(options));
|
||||||
|
|
||||||
public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<IServiceProvider, SourceHttpClientOptions> configure)
|
public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<IServiceProvider, SourceHttpClientOptions> configure)
|
||||||
{
|
{
|
||||||
ArgumentNullException.ThrowIfNull(services);
|
ArgumentNullException.ThrowIfNull(services);
|
||||||
ArgumentException.ThrowIfNullOrEmpty(name);
|
ArgumentException.ThrowIfNullOrEmpty(name);
|
||||||
ArgumentNullException.ThrowIfNull(configure);
|
ArgumentNullException.ThrowIfNull(configure);
|
||||||
|
|
||||||
services.AddOptions<SourceHttpClientOptions>(name).Configure<IServiceProvider>((options, sp) =>
|
services.AddOptions<SourceHttpClientOptions>(name).Configure<IServiceProvider>((options, sp) =>
|
||||||
{
|
{
|
||||||
configure(sp, options);
|
configure(sp, options);
|
||||||
SourceHttpClientConfigurationBinder.Apply(sp, name, options);
|
SourceHttpClientConfigurationBinder.Apply(sp, name, options);
|
||||||
});
|
});
|
||||||
|
|
||||||
return services
|
return services
|
||||||
.AddHttpClient(name)
|
.AddHttpClient(name)
|
||||||
.ConfigureHttpClient((sp, client) =>
|
.ConfigureHttpClient((sp, client) =>
|
||||||
{
|
{
|
||||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name);
|
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name);
|
||||||
|
|
||||||
if (options.BaseAddress is not null)
|
if (options.BaseAddress is not null)
|
||||||
{
|
{
|
||||||
client.BaseAddress = options.BaseAddress;
|
client.BaseAddress = options.BaseAddress;
|
||||||
}
|
}
|
||||||
|
|
||||||
client.Timeout = options.Timeout;
|
client.Timeout = options.Timeout;
|
||||||
client.DefaultRequestHeaders.UserAgent.Clear();
|
client.DefaultRequestHeaders.UserAgent.Clear();
|
||||||
client.DefaultRequestHeaders.UserAgent.ParseAdd(options.UserAgent);
|
client.DefaultRequestHeaders.UserAgent.ParseAdd(options.UserAgent);
|
||||||
client.DefaultRequestVersion = options.RequestVersion;
|
client.DefaultRequestVersion = options.RequestVersion;
|
||||||
client.DefaultVersionPolicy = options.VersionPolicy;
|
client.DefaultVersionPolicy = options.VersionPolicy;
|
||||||
|
|
||||||
foreach (var header in options.DefaultRequestHeaders)
|
foreach (var header in options.DefaultRequestHeaders)
|
||||||
{
|
{
|
||||||
client.DefaultRequestHeaders.TryAddWithoutValidation(header.Key, header.Value);
|
client.DefaultRequestHeaders.TryAddWithoutValidation(header.Key, header.Value);
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.ConfigurePrimaryHttpMessageHandler((sp) =>
|
.ConfigurePrimaryHttpMessageHandler((sp) =>
|
||||||
{
|
{
|
||||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
|
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
|
||||||
var handler = new SocketsHttpHandler
|
var handler = new SocketsHttpHandler
|
||||||
{
|
{
|
||||||
AllowAutoRedirect = options.AllowAutoRedirect,
|
AllowAutoRedirect = options.AllowAutoRedirect,
|
||||||
AutomaticDecompression = DecompressionMethods.All,
|
AutomaticDecompression = DecompressionMethods.All,
|
||||||
EnableMultipleHttp2Connections = options.EnableMultipleHttp2Connections,
|
EnableMultipleHttp2Connections = options.EnableMultipleHttp2Connections,
|
||||||
};
|
};
|
||||||
options.ConfigureHandler?.Invoke(handler);
|
options.ConfigureHandler?.Invoke(handler);
|
||||||
ApplyProxySettings(handler, options);
|
ApplyProxySettings(handler, options);
|
||||||
|
|
||||||
if (options.ServerCertificateCustomValidation is not null)
|
if (options.ServerCertificateCustomValidation is not null)
|
||||||
{
|
{
|
||||||
handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, sslPolicyErrors) =>
|
handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, sslPolicyErrors) =>
|
||||||
{
|
{
|
||||||
X509Certificate2? certToValidate = certificate as X509Certificate2;
|
X509Certificate2? certToValidate = certificate as X509Certificate2;
|
||||||
X509Certificate2? disposable = null;
|
X509Certificate2? disposable = null;
|
||||||
if (certToValidate is null && certificate is not null)
|
if (certToValidate is null && certificate is not null)
|
||||||
{
|
{
|
||||||
disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
|
disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
|
||||||
certToValidate = disposable;
|
certToValidate = disposable;
|
||||||
}
|
}
|
||||||
|
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
return options.ServerCertificateCustomValidation(certToValidate, chain, sslPolicyErrors);
|
return options.ServerCertificateCustomValidation(certToValidate, chain, sslPolicyErrors);
|
||||||
}
|
}
|
||||||
finally
|
finally
|
||||||
{
|
{
|
||||||
disposable?.Dispose();
|
disposable?.Dispose();
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
else if (options.TrustedRootCertificates.Count > 0 && handler.SslOptions.RemoteCertificateValidationCallback is null)
|
else if (options.TrustedRootCertificates.Count > 0 && handler.SslOptions.RemoteCertificateValidationCallback is null)
|
||||||
{
|
{
|
||||||
handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, errors) =>
|
handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, errors) =>
|
||||||
{
|
{
|
||||||
if (errors == SslPolicyErrors.None)
|
if (errors == SslPolicyErrors.None)
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (certificate is null)
|
if (certificate is null)
|
||||||
{
|
{
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
X509Certificate2? certToValidate = certificate as X509Certificate2;
|
X509Certificate2? certToValidate = certificate as X509Certificate2;
|
||||||
X509Certificate2? disposable = null;
|
X509Certificate2? disposable = null;
|
||||||
var trustedRootCopies = new X509Certificate2Collection();
|
var trustedRootCopies = new X509Certificate2Collection();
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
if (certToValidate is null)
|
if (certToValidate is null)
|
||||||
{
|
{
|
||||||
disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
|
disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
|
||||||
certToValidate = disposable;
|
certToValidate = disposable;
|
||||||
}
|
}
|
||||||
|
|
||||||
foreach (var root in options.TrustedRootCertificates)
|
foreach (var root in options.TrustedRootCertificates)
|
||||||
{
|
{
|
||||||
trustedRootCopies.Add(new X509Certificate2(root.RawData));
|
trustedRootCopies.Add(new X509Certificate2(root.RawData));
|
||||||
}
|
}
|
||||||
|
|
||||||
using var customChain = new X509Chain();
|
using var customChain = new X509Chain();
|
||||||
customChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust;
|
customChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust;
|
||||||
customChain.ChainPolicy.CustomTrustStore.Clear();
|
customChain.ChainPolicy.CustomTrustStore.Clear();
|
||||||
customChain.ChainPolicy.CustomTrustStore.AddRange(trustedRootCopies);
|
customChain.ChainPolicy.CustomTrustStore.AddRange(trustedRootCopies);
|
||||||
customChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
|
customChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
|
||||||
customChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag;
|
customChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag;
|
||||||
|
|
||||||
if (chain is not null)
|
if (chain is not null)
|
||||||
{
|
{
|
||||||
foreach (var element in chain.ChainElements)
|
foreach (var element in chain.ChainElements)
|
||||||
{
|
{
|
||||||
customChain.ChainPolicy.ExtraStore.Add(element.Certificate);
|
customChain.ChainPolicy.ExtraStore.Add(element.Certificate);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return certToValidate is not null && customChain.Build(certToValidate);
|
return certToValidate is not null && customChain.Build(certToValidate);
|
||||||
}
|
}
|
||||||
finally
|
finally
|
||||||
{
|
{
|
||||||
foreach (X509Certificate2 root in trustedRootCopies)
|
foreach (X509Certificate2 root in trustedRootCopies)
|
||||||
{
|
{
|
||||||
root.Dispose();
|
root.Dispose();
|
||||||
}
|
}
|
||||||
|
|
||||||
disposable?.Dispose();
|
disposable?.Dispose();
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
return handler;
|
return handler;
|
||||||
})
|
})
|
||||||
.AddHttpMessageHandler(sp =>
|
.AddHttpMessageHandler(sp =>
|
||||||
{
|
{
|
||||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
|
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
|
||||||
return new AllowlistedHttpMessageHandler(options);
|
return new AllowlistedHttpMessageHandler(options);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// Registers shared helpers used by source connectors.
|
/// Registers shared helpers used by source connectors.
|
||||||
/// </summary>
|
/// </summary>
|
||||||
public static IServiceCollection AddSourceCommon(this IServiceCollection services)
|
public static IServiceCollection AddSourceCommon(this IServiceCollection services)
|
||||||
{
|
{
|
||||||
ArgumentNullException.ThrowIfNull(services);
|
ArgumentNullException.ThrowIfNull(services);
|
||||||
|
|
||||||
services.AddSingleton<Json.JsonSchemaValidator>();
|
services.AddSingleton<Json.JsonSchemaValidator>();
|
||||||
services.AddSingleton<Json.IJsonSchemaValidator>(sp => sp.GetRequiredService<Json.JsonSchemaValidator>());
|
services.AddSingleton<Json.IJsonSchemaValidator>(sp => sp.GetRequiredService<Json.JsonSchemaValidator>());
|
||||||
services.AddSingleton<XmlSchemaValidator>();
|
services.AddSingleton<XmlSchemaValidator>();
|
||||||
@@ -170,40 +170,40 @@ public static class ServiceCollectionExtensions
|
|||||||
services.AddSingleton<Fetch.IJitterSource, Fetch.CryptoJitterSource>();
|
services.AddSingleton<Fetch.IJitterSource, Fetch.CryptoJitterSource>();
|
||||||
services.AddConcelierAocGuards();
|
services.AddConcelierAocGuards();
|
||||||
services.AddConcelierLinksetMappers();
|
services.AddConcelierLinksetMappers();
|
||||||
services.TryAddSingleton<IDocumentStore, InMemoryDocumentStore>();
|
services.TryAddScoped<IDocumentStore, InMemoryDocumentStore>();
|
||||||
services.AddSingleton<Fetch.RawDocumentStorage>();
|
services.AddScoped<Fetch.RawDocumentStorage>();
|
||||||
services.AddSingleton<Fetch.SourceFetchService>();
|
services.AddScoped<Fetch.SourceFetchService>();
|
||||||
|
|
||||||
return services;
|
return services;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void ApplyProxySettings(SocketsHttpHandler handler, SourceHttpClientOptions options)
|
private static void ApplyProxySettings(SocketsHttpHandler handler, SourceHttpClientOptions options)
|
||||||
{
|
{
|
||||||
if (options.ProxyAddress is null)
|
if (options.ProxyAddress is null)
|
||||||
{
|
{
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
var proxy = new WebProxy(options.ProxyAddress)
|
var proxy = new WebProxy(options.ProxyAddress)
|
||||||
{
|
{
|
||||||
BypassProxyOnLocal = options.ProxyBypassOnLocal,
|
BypassProxyOnLocal = options.ProxyBypassOnLocal,
|
||||||
UseDefaultCredentials = options.ProxyUseDefaultCredentials,
|
UseDefaultCredentials = options.ProxyUseDefaultCredentials,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (options.ProxyBypassList.Count > 0)
|
if (options.ProxyBypassList.Count > 0)
|
||||||
{
|
{
|
||||||
proxy.BypassList = options.ProxyBypassList.ToArray();
|
proxy.BypassList = options.ProxyBypassList.ToArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!options.ProxyUseDefaultCredentials
|
if (!options.ProxyUseDefaultCredentials
|
||||||
&& !string.IsNullOrWhiteSpace(options.ProxyUsername))
|
&& !string.IsNullOrWhiteSpace(options.ProxyUsername))
|
||||||
{
|
{
|
||||||
proxy.Credentials = new NetworkCredential(
|
proxy.Credentials = new NetworkCredential(
|
||||||
options.ProxyUsername,
|
options.ProxyUsername,
|
||||||
options.ProxyPassword ?? string.Empty);
|
options.ProxyPassword ?? string.Empty);
|
||||||
}
|
}
|
||||||
|
|
||||||
handler.Proxy = proxy;
|
handler.Proxy = proxy;
|
||||||
handler.UseProxy = true;
|
handler.UseProxy = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -144,18 +144,21 @@ public sealed class SourceStateSeedProcessor
|
|||||||
|
|
||||||
var existing = await _documentStore.FindBySourceAndUriAsync(source, document.Uri, cancellationToken).ConfigureAwait(false);
|
var existing = await _documentStore.FindBySourceAndUriAsync(source, document.Uri, cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
|
var recordId = document.DocumentId ?? existing?.Id ?? Guid.NewGuid();
|
||||||
|
|
||||||
if (existing?.PayloadId is { } oldGridId)
|
if (existing?.PayloadId is { } oldGridId)
|
||||||
{
|
{
|
||||||
await _rawDocumentStorage.DeleteAsync(oldGridId, cancellationToken).ConfigureAwait(false);
|
await _rawDocumentStorage.DeleteAsync(oldGridId, cancellationToken).ConfigureAwait(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
var gridId = await _rawDocumentStorage.UploadAsync(
|
_ = await _rawDocumentStorage.UploadAsync(
|
||||||
source,
|
source,
|
||||||
document.Uri,
|
document.Uri,
|
||||||
payload,
|
payload,
|
||||||
document.ContentType,
|
document.ContentType,
|
||||||
document.ExpiresAt,
|
document.ExpiresAt,
|
||||||
cancellationToken)
|
cancellationToken,
|
||||||
|
recordId)
|
||||||
.ConfigureAwait(false);
|
.ConfigureAwait(false);
|
||||||
|
|
||||||
var headers = CloneDictionary(document.Headers);
|
var headers = CloneDictionary(document.Headers);
|
||||||
@@ -171,7 +174,7 @@ public sealed class SourceStateSeedProcessor
|
|||||||
var metadata = CloneDictionary(document.Metadata);
|
var metadata = CloneDictionary(document.Metadata);
|
||||||
|
|
||||||
var record = new MongoContracts.DocumentRecord(
|
var record = new MongoContracts.DocumentRecord(
|
||||||
document.DocumentId ?? existing?.Id ?? Guid.NewGuid(),
|
recordId,
|
||||||
source,
|
source,
|
||||||
document.Uri,
|
document.Uri,
|
||||||
document.FetchedAt ?? completedAt,
|
document.FetchedAt ?? completedAt,
|
||||||
@@ -182,8 +185,9 @@ public sealed class SourceStateSeedProcessor
|
|||||||
metadata,
|
metadata,
|
||||||
document.Etag,
|
document.Etag,
|
||||||
document.LastModified,
|
document.LastModified,
|
||||||
gridId,
|
recordId,
|
||||||
document.ExpiresAt);
|
document.ExpiresAt,
|
||||||
|
payload);
|
||||||
|
|
||||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Cve.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Cve.Internal;
|
using StellaOps.Concelier.Connector.Cve.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Cve;
|
namespace StellaOps.Concelier.Connector.Cve;
|
||||||
@@ -510,24 +510,7 @@ public sealed class CveConnector : IFeedConnector
|
|||||||
|
|
||||||
var sha256 = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
|
var sha256 = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
|
||||||
var lastModified = dto.Modified ?? dto.Published ?? now;
|
var lastModified = dto.Modified ?? dto.Published ?? now;
|
||||||
ObjectId gridId = ObjectId.Empty;
|
await _rawDocumentStorage.UploadAsync(SourceName, uri, payload, "application/json", ExpiresAt: null, cancellationToken, documentId).ConfigureAwait(false);
|
||||||
|
|
||||||
try
|
|
||||||
{
|
|
||||||
if (existing?.PayloadId is ObjectId existingGrid && existingGrid != ObjectId.Empty)
|
|
||||||
{
|
|
||||||
gridId = existingGrid;
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
gridId = await _rawDocumentStorage.UploadAsync(SourceName, uri, payload, "application/json", cancellationToken).ConfigureAwait(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (Exception ex)
|
|
||||||
{
|
|
||||||
_logger.LogWarning(ex, "Unable to store CVE seed payload for {CveId}", dto.CveId);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||||
{
|
{
|
||||||
@@ -547,7 +530,8 @@ public sealed class CveConnector : IFeedConnector
|
|||||||
Metadata: metadata,
|
Metadata: metadata,
|
||||||
Etag: null,
|
Etag: null,
|
||||||
LastModified: lastModified,
|
LastModified: lastModified,
|
||||||
PayloadId: gridId);
|
PayloadId: documentId,
|
||||||
|
Payload: payload);
|
||||||
|
|
||||||
await _documentStore.UpsertAsync(document, cancellationToken).ConfigureAwait(false);
|
await _documentStore.UpsertAsync(document, cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ using System.Collections.Generic;
|
|||||||
using System.Linq;
|
using System.Linq;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Normalization.Cvss;
|
using StellaOps.Concelier.Normalization.Cvss;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using NuGet.Versioning;
|
using NuGet.Versioning;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Cve.Internal;
|
namespace StellaOps.Concelier.Connector.Cve.Internal;
|
||||||
|
|||||||
@@ -16,8 +16,8 @@ using StellaOps.Concelier.Connector.Distro.Debian.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Distro.Debian.Internal;
|
using StellaOps.Concelier.Connector.Distro.Debian.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Distro.Debian;
|
namespace StellaOps.Concelier.Connector.Distro.Debian;
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ internal sealed record DebianFetchCacheEntry(string? ETag, DateTimeOffset? LastM
|
|||||||
{
|
{
|
||||||
public static DebianFetchCacheEntry Empty { get; } = new(null, null);
|
public static DebianFetchCacheEntry Empty { get; } = new(null, null);
|
||||||
|
|
||||||
public static DebianFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
public static DebianFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||||
=> new(document.Etag, document.LastModified);
|
=> new(document.Etag, document.LastModified);
|
||||||
|
|
||||||
public static DebianFetchCacheEntry FromBson(BsonDocument document)
|
public static DebianFetchCacheEntry FromBson(BsonDocument document)
|
||||||
@@ -54,7 +54,7 @@ internal sealed record DebianFetchCacheEntry(string? ETag, DateTimeOffset? LastM
|
|||||||
return document;
|
return document;
|
||||||
}
|
}
|
||||||
|
|
||||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
public bool Matches(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||||
{
|
{
|
||||||
if (document is null)
|
if (document is null)
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ using System.Linq;
|
|||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Normalization.Distro;
|
using StellaOps.Concelier.Normalization.Distro;
|
||||||
using StellaOps.Concelier.Connector.Common;
|
using StellaOps.Concelier.Connector.Common;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Distro.Debian.Internal;
|
namespace StellaOps.Concelier.Connector.Distro.Debian.Internal;
|
||||||
|
|
||||||
@@ -146,24 +146,24 @@ internal static class DebianMapper
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
var provenance = new[] { BuildPackageProvenance(package, recordedAt) };
|
var provenance = new[] { BuildPackageProvenance(package, recordedAt) };
|
||||||
var ranges = BuildVersionRanges(package, recordedAt);
|
var ranges = BuildVersionRanges(package, recordedAt);
|
||||||
var normalizedVersions = BuildNormalizedVersions(package, ranges);
|
var normalizedVersions = BuildNormalizedVersions(package, ranges);
|
||||||
|
|
||||||
packages.Add(new AffectedPackage(
|
packages.Add(new AffectedPackage(
|
||||||
AffectedPackageTypes.Deb,
|
AffectedPackageTypes.Deb,
|
||||||
identifier: package.Package.Trim(),
|
identifier: package.Package.Trim(),
|
||||||
platform: package.Release,
|
platform: package.Release,
|
||||||
versionRanges: ranges,
|
versionRanges: ranges,
|
||||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||||
provenance: provenance,
|
provenance: provenance,
|
||||||
normalizedVersions: normalizedVersions));
|
normalizedVersions: normalizedVersions));
|
||||||
}
|
}
|
||||||
|
|
||||||
return packages;
|
return packages;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(DebianPackageStateDto package, DateTimeOffset recordedAt)
|
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(DebianPackageStateDto package, DateTimeOffset recordedAt)
|
||||||
{
|
{
|
||||||
var provenance = new AdvisoryProvenance(
|
var provenance = new AdvisoryProvenance(
|
||||||
DebianConnectorPlugin.SourceName,
|
DebianConnectorPlugin.SourceName,
|
||||||
@@ -255,40 +255,40 @@ internal static class DebianMapper
|
|||||||
parts.Add($"last:{lastAffected.Trim()}");
|
parts.Add($"last:{lastAffected.Trim()}");
|
||||||
}
|
}
|
||||||
|
|
||||||
return parts.Count == 0 ? null : string.Join(" ", parts);
|
return parts.Count == 0 ? null : string.Join(" ", parts);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||||
DebianPackageStateDto package,
|
DebianPackageStateDto package,
|
||||||
IReadOnlyList<AffectedVersionRange> ranges)
|
IReadOnlyList<AffectedVersionRange> ranges)
|
||||||
{
|
{
|
||||||
if (ranges.Count == 0)
|
if (ranges.Count == 0)
|
||||||
{
|
{
|
||||||
return Array.Empty<NormalizedVersionRule>();
|
return Array.Empty<NormalizedVersionRule>();
|
||||||
}
|
}
|
||||||
|
|
||||||
var note = string.IsNullOrWhiteSpace(package.Release)
|
var note = string.IsNullOrWhiteSpace(package.Release)
|
||||||
? null
|
? null
|
||||||
: $"debian:{package.Release.Trim()}";
|
: $"debian:{package.Release.Trim()}";
|
||||||
|
|
||||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||||
foreach (var range in ranges)
|
foreach (var range in ranges)
|
||||||
{
|
{
|
||||||
var rule = range.ToNormalizedVersionRule(note);
|
var rule = range.ToNormalizedVersionRule(note);
|
||||||
if (rule is not null)
|
if (rule is not null)
|
||||||
{
|
{
|
||||||
rules.Add(rule);
|
rules.Add(rule);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
|
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void AddExtension(IDictionary<string, string> extensions, string key, string? value)
|
private static void AddExtension(IDictionary<string, string> extensions, string key, string? value)
|
||||||
{
|
{
|
||||||
if (!string.IsNullOrWhiteSpace(value))
|
if (!string.IsNullOrWhiteSpace(value))
|
||||||
{
|
{
|
||||||
extensions[key] = value.Trim();
|
extensions[key] = value.Trim();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ using StellaOps.Concelier.Normalization.Cvss;
|
|||||||
using StellaOps.Concelier.Normalization.Distro;
|
using StellaOps.Concelier.Normalization.Distro;
|
||||||
using StellaOps.Concelier.Normalization.Identifiers;
|
using StellaOps.Concelier.Normalization.Identifiers;
|
||||||
using StellaOps.Concelier.Normalization.Text;
|
using StellaOps.Concelier.Normalization.Text;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Distro.RedHat.Internal;
|
namespace StellaOps.Concelier.Connector.Distro.RedHat.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Distro.RedHat.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Distro.RedHat.Internal;
|
using StellaOps.Concelier.Connector.Distro.RedHat.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Distro.RedHat;
|
namespace StellaOps.Concelier.Connector.Distro.RedHat;
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ internal sealed record SuseFetchCacheEntry(string? ETag, DateTimeOffset? LastMod
|
|||||||
{
|
{
|
||||||
public static SuseFetchCacheEntry Empty { get; } = new(null, null);
|
public static SuseFetchCacheEntry Empty { get; } = new(null, null);
|
||||||
|
|
||||||
public static SuseFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
public static SuseFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||||
=> new(document.Etag, document.LastModified);
|
=> new(document.Etag, document.LastModified);
|
||||||
|
|
||||||
public static SuseFetchCacheEntry FromBson(BsonDocument document)
|
public static SuseFetchCacheEntry FromBson(BsonDocument document)
|
||||||
@@ -54,7 +54,7 @@ internal sealed record SuseFetchCacheEntry(string? ETag, DateTimeOffset? LastMod
|
|||||||
return document;
|
return document;
|
||||||
}
|
}
|
||||||
|
|
||||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
public bool Matches(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||||
{
|
{
|
||||||
if (document is null)
|
if (document is null)
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ using System.Globalization;
|
|||||||
using System.Linq;
|
using System.Linq;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Normalization.Distro;
|
using StellaOps.Concelier.Normalization.Distro;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
|
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
|
||||||
|
|
||||||
@@ -160,16 +160,16 @@ internal static class SuseMapper
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
var normalizedVersions = BuildNormalizedVersions(package, ranges);
|
var normalizedVersions = BuildNormalizedVersions(package, ranges);
|
||||||
|
|
||||||
packages.Add(new AffectedPackage(
|
packages.Add(new AffectedPackage(
|
||||||
AffectedPackageTypes.Rpm,
|
AffectedPackageTypes.Rpm,
|
||||||
identifier: nevra!.ToCanonicalString(),
|
identifier: nevra!.ToCanonicalString(),
|
||||||
platform: package.Platform,
|
platform: package.Platform,
|
||||||
versionRanges: ranges,
|
versionRanges: ranges,
|
||||||
statuses: BuildStatuses(package, affectedProvenance),
|
statuses: BuildStatuses(package, affectedProvenance),
|
||||||
provenance: new[] { affectedProvenance },
|
provenance: new[] { affectedProvenance },
|
||||||
normalizedVersions: normalizedVersions));
|
normalizedVersions: normalizedVersions));
|
||||||
}
|
}
|
||||||
|
|
||||||
return packages.Count == 0
|
return packages.Count == 0
|
||||||
@@ -293,7 +293,7 @@ internal static class SuseMapper
|
|||||||
return !string.IsNullOrWhiteSpace(version) && !string.IsNullOrWhiteSpace(release);
|
return !string.IsNullOrWhiteSpace(version) && !string.IsNullOrWhiteSpace(release);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static string? BuildRangeExpression(string? introduced, string? fixedVersion, string? lastAffected)
|
private static string? BuildRangeExpression(string? introduced, string? fixedVersion, string? lastAffected)
|
||||||
{
|
{
|
||||||
var parts = new List<string>(3);
|
var parts = new List<string>(3);
|
||||||
if (!string.IsNullOrWhiteSpace(introduced))
|
if (!string.IsNullOrWhiteSpace(introduced))
|
||||||
@@ -311,32 +311,32 @@ internal static class SuseMapper
|
|||||||
parts.Add($"last:{lastAffected}");
|
parts.Add($"last:{lastAffected}");
|
||||||
}
|
}
|
||||||
|
|
||||||
return parts.Count == 0 ? null : string.Join(" ", parts);
|
return parts.Count == 0 ? null : string.Join(" ", parts);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||||
SusePackageStateDto package,
|
SusePackageStateDto package,
|
||||||
IReadOnlyList<AffectedVersionRange> ranges)
|
IReadOnlyList<AffectedVersionRange> ranges)
|
||||||
{
|
{
|
||||||
if (ranges.Count == 0)
|
if (ranges.Count == 0)
|
||||||
{
|
{
|
||||||
return Array.Empty<NormalizedVersionRule>();
|
return Array.Empty<NormalizedVersionRule>();
|
||||||
}
|
}
|
||||||
|
|
||||||
var note = string.IsNullOrWhiteSpace(package.Platform)
|
var note = string.IsNullOrWhiteSpace(package.Platform)
|
||||||
? null
|
? null
|
||||||
: $"suse:{package.Platform.Trim()}";
|
: $"suse:{package.Platform.Trim()}";
|
||||||
|
|
||||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||||
foreach (var range in ranges)
|
foreach (var range in ranges)
|
||||||
{
|
{
|
||||||
var rule = range.ToNormalizedVersionRule(note);
|
var rule = range.ToNormalizedVersionRule(note);
|
||||||
if (rule is not null)
|
if (rule is not null)
|
||||||
{
|
{
|
||||||
rules.Add(rule);
|
rules.Add(rule);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
|
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,8 +18,8 @@ using StellaOps.Concelier.Connector.Distro.Suse.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Distro.Suse.Internal;
|
using StellaOps.Concelier.Connector.Distro.Suse.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Distro.Suse;
|
namespace StellaOps.Concelier.Connector.Distro.Suse;
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ internal sealed record UbuntuFetchCacheEntry(string? ETag, DateTimeOffset? LastM
|
|||||||
{
|
{
|
||||||
public static UbuntuFetchCacheEntry Empty { get; } = new(null, null);
|
public static UbuntuFetchCacheEntry Empty { get; } = new(null, null);
|
||||||
|
|
||||||
public static UbuntuFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
public static UbuntuFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||||
=> new(document.Etag, document.LastModified);
|
=> new(document.Etag, document.LastModified);
|
||||||
|
|
||||||
public static UbuntuFetchCacheEntry FromBson(BsonDocument document)
|
public static UbuntuFetchCacheEntry FromBson(BsonDocument document)
|
||||||
@@ -54,7 +54,7 @@ internal sealed record UbuntuFetchCacheEntry(string? ETag, DateTimeOffset? LastM
|
|||||||
return doc;
|
return doc;
|
||||||
}
|
}
|
||||||
|
|
||||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
public bool Matches(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||||
{
|
{
|
||||||
if (document is null)
|
if (document is null)
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ using System.Collections.Generic;
|
|||||||
using System.Linq;
|
using System.Linq;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Normalization.Distro;
|
using StellaOps.Concelier.Normalization.Distro;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Distro.Ubuntu.Internal;
|
namespace StellaOps.Concelier.Connector.Distro.Ubuntu.Internal;
|
||||||
|
|
||||||
@@ -162,11 +162,11 @@ internal static class UbuntuMapper
|
|||||||
["ubuntu.pocket"] = package.Pocket ?? string.Empty
|
["ubuntu.pocket"] = package.Pocket ?? string.Empty
|
||||||
};
|
};
|
||||||
|
|
||||||
var range = new AffectedVersionRange(
|
var range = new AffectedVersionRange(
|
||||||
rangeKind: "evr",
|
rangeKind: "evr",
|
||||||
introducedVersion: null,
|
introducedVersion: null,
|
||||||
fixedVersion: package.Version,
|
fixedVersion: package.Version,
|
||||||
lastAffectedVersion: null,
|
lastAffectedVersion: null,
|
||||||
rangeExpression: rangeExpression,
|
rangeExpression: rangeExpression,
|
||||||
provenance: rangeProvenance,
|
provenance: rangeProvenance,
|
||||||
primitives: new RangePrimitives(
|
primitives: new RangePrimitives(
|
||||||
@@ -178,27 +178,27 @@ internal static class UbuntuMapper
|
|||||||
LastAffected: null),
|
LastAffected: null),
|
||||||
VendorExtensions: extensions));
|
VendorExtensions: extensions));
|
||||||
|
|
||||||
var statuses = new[]
|
var statuses = new[]
|
||||||
{
|
{
|
||||||
new AffectedPackageStatus(DetermineStatus(package), provenance)
|
new AffectedPackageStatus(DetermineStatus(package), provenance)
|
||||||
};
|
};
|
||||||
|
|
||||||
var normalizedNote = string.IsNullOrWhiteSpace(package.Release)
|
var normalizedNote = string.IsNullOrWhiteSpace(package.Release)
|
||||||
? null
|
? null
|
||||||
: $"ubuntu:{package.Release.Trim()}";
|
: $"ubuntu:{package.Release.Trim()}";
|
||||||
var normalizedRule = range.ToNormalizedVersionRule(normalizedNote);
|
var normalizedRule = range.ToNormalizedVersionRule(normalizedNote);
|
||||||
var normalizedVersions = normalizedRule is null
|
var normalizedVersions = normalizedRule is null
|
||||||
? Array.Empty<NormalizedVersionRule>()
|
? Array.Empty<NormalizedVersionRule>()
|
||||||
: new[] { normalizedRule };
|
: new[] { normalizedRule };
|
||||||
|
|
||||||
list.Add(new AffectedPackage(
|
list.Add(new AffectedPackage(
|
||||||
type: AffectedPackageTypes.Deb,
|
type: AffectedPackageTypes.Deb,
|
||||||
identifier: package.Package,
|
identifier: package.Package,
|
||||||
platform: package.Release,
|
platform: package.Release,
|
||||||
versionRanges: new[] { range },
|
versionRanges: new[] { range },
|
||||||
statuses: statuses,
|
statuses: statuses,
|
||||||
provenance: new[] { provenance },
|
provenance: new[] { provenance },
|
||||||
normalizedVersions: normalizedVersions));
|
normalizedVersions: normalizedVersions));
|
||||||
}
|
}
|
||||||
|
|
||||||
return list.Count == 0
|
return list.Count == 0
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Distro.Ubuntu.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Distro.Ubuntu.Internal;
|
using StellaOps.Concelier.Connector.Distro.Ubuntu.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
using StellaOps.Cryptography;
|
using StellaOps.Cryptography;
|
||||||
|
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Ghsa.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Ghsa.Internal;
|
using StellaOps.Concelier.Connector.Ghsa.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Ghsa;
|
namespace StellaOps.Concelier.Connector.Ghsa;
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ using System.Text;
|
|||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Normalization.Cvss;
|
using StellaOps.Concelier.Normalization.Cvss;
|
||||||
using StellaOps.Concelier.Normalization.SemVer;
|
using StellaOps.Concelier.Normalization.SemVer;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Ghsa.Internal;
|
namespace StellaOps.Concelier.Connector.Ghsa.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -25,8 +25,8 @@ using StellaOps.Concelier.Connector.Ics.Cisa.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Ics.Cisa.Internal;
|
using StellaOps.Concelier.Connector.Ics.Cisa.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Normalization.SemVer;
|
using StellaOps.Concelier.Normalization.SemVer;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Ics.Kaspersky.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Ics.Kaspersky.Internal;
|
using StellaOps.Concelier.Connector.Ics.Kaspersky.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Ics.Kaspersky;
|
namespace StellaOps.Concelier.Connector.Ics.Kaspersky;
|
||||||
|
|||||||
@@ -6,8 +6,8 @@ using StellaOps.Concelier.Connector.Common;
|
|||||||
using StellaOps.Concelier.Normalization.Cvss;
|
using StellaOps.Concelier.Normalization.Cvss;
|
||||||
using StellaOps.Concelier.Normalization.Identifiers;
|
using StellaOps.Concelier.Normalization.Identifiers;
|
||||||
using StellaOps.Concelier.Normalization.Text;
|
using StellaOps.Concelier.Normalization.Text;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.JpFlags;
|
using StellaOps.Concelier.Storage.Mongo.JpFlags;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Jvn.Internal;
|
namespace StellaOps.Concelier.Connector.Jvn.Internal;
|
||||||
@@ -156,12 +156,12 @@ internal static class JvnAdvisoryMapper
|
|||||||
{
|
{
|
||||||
var packages = new List<AffectedPackage>();
|
var packages = new List<AffectedPackage>();
|
||||||
|
|
||||||
foreach (var product in detail.Affected)
|
foreach (var product in detail.Affected)
|
||||||
{
|
{
|
||||||
if (string.IsNullOrWhiteSpace(product.Cpe))
|
if (string.IsNullOrWhiteSpace(product.Cpe))
|
||||||
{
|
{
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(product.Status) && !product.Status.StartsWith("vulnerable", StringComparison.OrdinalIgnoreCase))
|
if (!string.IsNullOrWhiteSpace(product.Status) && !product.Status.StartsWith("vulnerable", StringComparison.OrdinalIgnoreCase))
|
||||||
{
|
{
|
||||||
@@ -200,66 +200,66 @@ internal static class JvnAdvisoryMapper
|
|||||||
|
|
||||||
var platform = product.Vendor ?? product.CpeVendor;
|
var platform = product.Vendor ?? product.CpeVendor;
|
||||||
|
|
||||||
var versionRanges = BuildVersionRanges(product, recordedAt, provenance[0]);
|
var versionRanges = BuildVersionRanges(product, recordedAt, provenance[0]);
|
||||||
|
|
||||||
packages.Add(new AffectedPackage(
|
packages.Add(new AffectedPackage(
|
||||||
AffectedPackageTypes.Cpe,
|
AffectedPackageTypes.Cpe,
|
||||||
cpe!,
|
cpe!,
|
||||||
platform: platform,
|
platform: platform,
|
||||||
versionRanges: versionRanges,
|
versionRanges: versionRanges,
|
||||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||||
provenance: provenance.ToArray()));
|
provenance: provenance.ToArray()));
|
||||||
}
|
}
|
||||||
|
|
||||||
return packages;
|
return packages;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(JvnAffectedProductDto product, DateTimeOffset recordedAt, AdvisoryProvenance provenance)
|
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(JvnAffectedProductDto product, DateTimeOffset recordedAt, AdvisoryProvenance provenance)
|
||||||
{
|
{
|
||||||
var extensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
var extensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||||
if (!string.IsNullOrWhiteSpace(product.Version))
|
if (!string.IsNullOrWhiteSpace(product.Version))
|
||||||
{
|
{
|
||||||
extensions["jvn.version"] = product.Version!;
|
extensions["jvn.version"] = product.Version!;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(product.Build))
|
if (!string.IsNullOrWhiteSpace(product.Build))
|
||||||
{
|
{
|
||||||
extensions["jvn.build"] = product.Build!;
|
extensions["jvn.build"] = product.Build!;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(product.Description))
|
if (!string.IsNullOrWhiteSpace(product.Description))
|
||||||
{
|
{
|
||||||
extensions["jvn.description"] = product.Description!;
|
extensions["jvn.description"] = product.Description!;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(product.Status))
|
if (!string.IsNullOrWhiteSpace(product.Status))
|
||||||
{
|
{
|
||||||
extensions["jvn.status"] = product.Status!;
|
extensions["jvn.status"] = product.Status!;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (extensions.Count == 0)
|
if (extensions.Count == 0)
|
||||||
{
|
{
|
||||||
return Array.Empty<AffectedVersionRange>();
|
return Array.Empty<AffectedVersionRange>();
|
||||||
}
|
}
|
||||||
|
|
||||||
var primitives = new RangePrimitives(
|
var primitives = new RangePrimitives(
|
||||||
null,
|
null,
|
||||||
null,
|
null,
|
||||||
null,
|
null,
|
||||||
extensions);
|
extensions);
|
||||||
|
|
||||||
var expression = product.Version;
|
var expression = product.Version;
|
||||||
var range = new AffectedVersionRange(
|
var range = new AffectedVersionRange(
|
||||||
rangeKind: "cpe",
|
rangeKind: "cpe",
|
||||||
introducedVersion: null,
|
introducedVersion: null,
|
||||||
fixedVersion: null,
|
fixedVersion: null,
|
||||||
lastAffectedVersion: null,
|
lastAffectedVersion: null,
|
||||||
rangeExpression: string.IsNullOrWhiteSpace(expression) ? null : expression,
|
rangeExpression: string.IsNullOrWhiteSpace(expression) ? null : expression,
|
||||||
provenance: provenance,
|
provenance: provenance,
|
||||||
primitives: primitives);
|
primitives: primitives);
|
||||||
|
|
||||||
return new[] { range };
|
return new[] { range };
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<CvssMetric> BuildCvss(JvnDetailDto detail, DateTimeOffset recordedAt, out string? severity)
|
private static IReadOnlyList<CvssMetric> BuildCvss(JvnDetailDto detail, DateTimeOffset recordedAt, out string? severity)
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ using StellaOps.Concelier.Connector.Jvn.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Jvn.Internal;
|
using StellaOps.Concelier.Connector.Jvn.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.JpFlags;
|
using StellaOps.Concelier.Storage.Mongo.JpFlags;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
|
|||||||
@@ -16,8 +16,8 @@ using StellaOps.Concelier.Connector.Kev.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Kev.Internal;
|
using StellaOps.Concelier.Connector.Kev.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Kev;
|
namespace StellaOps.Concelier.Connector.Kev;
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Text.RegularExpressions;
|
using System.Text.RegularExpressions;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Kisa.Internal;
|
namespace StellaOps.Concelier.Connector.Kisa.Internal;
|
||||||
|
|
||||||
internal static class KisaMapper
|
internal static class KisaMapper
|
||||||
{
|
{
|
||||||
public static Advisory Map(KisaParsedAdvisory dto, DocumentRecord document, DateTimeOffset recordedAt)
|
public static Advisory Map(KisaParsedAdvisory dto, DocumentRecord document, DateTimeOffset recordedAt)
|
||||||
@@ -97,410 +97,410 @@ internal static class KisaMapper
|
|||||||
}
|
}
|
||||||
|
|
||||||
var packages = new List<AffectedPackage>(dto.Products.Count);
|
var packages = new List<AffectedPackage>(dto.Products.Count);
|
||||||
foreach (var product in dto.Products)
|
foreach (var product in dto.Products)
|
||||||
{
|
{
|
||||||
var vendor = string.IsNullOrWhiteSpace(product.Vendor) ? "Unknown" : product.Vendor!;
|
var vendor = string.IsNullOrWhiteSpace(product.Vendor) ? "Unknown" : product.Vendor!;
|
||||||
var name = product.Name;
|
var name = product.Name;
|
||||||
var identifier = string.IsNullOrWhiteSpace(name) ? vendor : $"{vendor} {name}";
|
var identifier = string.IsNullOrWhiteSpace(name) ? vendor : $"{vendor} {name}";
|
||||||
var normalizedIdentifier = CreateSlug(identifier);
|
var normalizedIdentifier = CreateSlug(identifier);
|
||||||
var rangeProvenanceKey = $"kisa:{dto.AdvisoryId}:{normalizedIdentifier}";
|
var rangeProvenanceKey = $"kisa:{dto.AdvisoryId}:{normalizedIdentifier}";
|
||||||
|
|
||||||
var artifacts = BuildVersionArtifacts(product, rangeProvenanceKey, recordedAt);
|
var artifacts = BuildVersionArtifacts(product, rangeProvenanceKey, recordedAt);
|
||||||
var fieldMasks = new HashSet<string>(StringComparer.Ordinal)
|
var fieldMasks = new HashSet<string>(StringComparer.Ordinal)
|
||||||
{
|
{
|
||||||
ProvenanceFieldMasks.AffectedPackages
|
ProvenanceFieldMasks.AffectedPackages
|
||||||
};
|
};
|
||||||
|
|
||||||
if (artifacts.Ranges.Count > 0)
|
if (artifacts.Ranges.Count > 0)
|
||||||
{
|
{
|
||||||
fieldMasks.Add(ProvenanceFieldMasks.VersionRanges);
|
fieldMasks.Add(ProvenanceFieldMasks.VersionRanges);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (artifacts.NormalizedVersions.Count > 0)
|
if (artifacts.NormalizedVersions.Count > 0)
|
||||||
{
|
{
|
||||||
fieldMasks.Add(ProvenanceFieldMasks.NormalizedVersions);
|
fieldMasks.Add(ProvenanceFieldMasks.NormalizedVersions);
|
||||||
}
|
}
|
||||||
|
|
||||||
var packageProvenance = new AdvisoryProvenance(
|
var packageProvenance = new AdvisoryProvenance(
|
||||||
KisaConnectorPlugin.SourceName,
|
KisaConnectorPlugin.SourceName,
|
||||||
"package",
|
"package",
|
||||||
identifier,
|
identifier,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
fieldMasks);
|
fieldMasks);
|
||||||
|
|
||||||
packages.Add(new AffectedPackage(
|
packages.Add(new AffectedPackage(
|
||||||
AffectedPackageTypes.Vendor,
|
AffectedPackageTypes.Vendor,
|
||||||
identifier,
|
identifier,
|
||||||
platform: null,
|
platform: null,
|
||||||
versionRanges: artifacts.Ranges,
|
versionRanges: artifacts.Ranges,
|
||||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||||
provenance: new[] { packageProvenance },
|
provenance: new[] { packageProvenance },
|
||||||
normalizedVersions: artifacts.NormalizedVersions));
|
normalizedVersions: artifacts.NormalizedVersions));
|
||||||
}
|
}
|
||||||
|
|
||||||
return packages
|
return packages
|
||||||
.DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
.DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||||
.OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
.OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||||
.ToArray();
|
.ToArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static (IReadOnlyList<AffectedVersionRange> Ranges, IReadOnlyList<NormalizedVersionRule> NormalizedVersions) BuildVersionArtifacts(
|
private static (IReadOnlyList<AffectedVersionRange> Ranges, IReadOnlyList<NormalizedVersionRule> NormalizedVersions) BuildVersionArtifacts(
|
||||||
KisaParsedProduct product,
|
KisaParsedProduct product,
|
||||||
string provenanceValue,
|
string provenanceValue,
|
||||||
DateTimeOffset recordedAt)
|
DateTimeOffset recordedAt)
|
||||||
{
|
{
|
||||||
if (string.IsNullOrWhiteSpace(product.Versions))
|
if (string.IsNullOrWhiteSpace(product.Versions))
|
||||||
{
|
{
|
||||||
var fallback = CreateFallbackRange(product.Versions ?? string.Empty, provenanceValue, recordedAt);
|
var fallback = CreateFallbackRange(product.Versions ?? string.Empty, provenanceValue, recordedAt);
|
||||||
return (new[] { fallback }, Array.Empty<NormalizedVersionRule>());
|
return (new[] { fallback }, Array.Empty<NormalizedVersionRule>());
|
||||||
}
|
}
|
||||||
|
|
||||||
var segment = product.Versions.Trim();
|
var segment = product.Versions.Trim();
|
||||||
var result = ParseRangeSegment(segment, provenanceValue, recordedAt);
|
var result = ParseRangeSegment(segment, provenanceValue, recordedAt);
|
||||||
|
|
||||||
var ranges = new[] { result.Range };
|
var ranges = new[] { result.Range };
|
||||||
var normalized = result.NormalizedRule is null
|
var normalized = result.NormalizedRule is null
|
||||||
? Array.Empty<NormalizedVersionRule>()
|
? Array.Empty<NormalizedVersionRule>()
|
||||||
: new[] { result.NormalizedRule };
|
: new[] { result.NormalizedRule };
|
||||||
|
|
||||||
return (ranges, normalized);
|
return (ranges, normalized);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static (AffectedVersionRange Range, NormalizedVersionRule? NormalizedRule) ParseRangeSegment(
|
private static (AffectedVersionRange Range, NormalizedVersionRule? NormalizedRule) ParseRangeSegment(
|
||||||
string segment,
|
string segment,
|
||||||
string provenanceValue,
|
string provenanceValue,
|
||||||
DateTimeOffset recordedAt)
|
DateTimeOffset recordedAt)
|
||||||
{
|
{
|
||||||
var trimmed = segment.Trim();
|
var trimmed = segment.Trim();
|
||||||
if (trimmed.Length == 0)
|
if (trimmed.Length == 0)
|
||||||
{
|
{
|
||||||
return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
|
return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
|
||||||
}
|
}
|
||||||
|
|
||||||
var matches = VersionPattern.Matches(trimmed);
|
var matches = VersionPattern.Matches(trimmed);
|
||||||
if (matches.Count == 0)
|
if (matches.Count == 0)
|
||||||
{
|
{
|
||||||
return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
|
return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
|
||||||
}
|
}
|
||||||
|
|
||||||
var startMatch = matches[0];
|
var startMatch = matches[0];
|
||||||
var startVersion = startMatch.Value;
|
var startVersion = startMatch.Value;
|
||||||
string? endVersion = matches.Count > 1 ? matches[1].Value : null;
|
string? endVersion = matches.Count > 1 ? matches[1].Value : null;
|
||||||
|
|
||||||
var prefix = trimmed[..startMatch.Index].Trim();
|
var prefix = trimmed[..startMatch.Index].Trim();
|
||||||
var startContext = ExtractSpan(trimmed, startMatch.Index + startMatch.Length, endVersion is not null ? matches[1].Index : trimmed.Length).Trim();
|
var startContext = ExtractSpan(trimmed, startMatch.Index + startMatch.Length, endVersion is not null ? matches[1].Index : trimmed.Length).Trim();
|
||||||
var endContext = endVersion is not null
|
var endContext = endVersion is not null
|
||||||
? trimmed[(matches[1].Index + matches[1].Length)..].Trim()
|
? trimmed[(matches[1].Index + matches[1].Length)..].Trim()
|
||||||
: string.Empty;
|
: string.Empty;
|
||||||
|
|
||||||
var introducedInclusive = DetermineStartInclusivity(prefix, startContext, trimmed);
|
var introducedInclusive = DetermineStartInclusivity(prefix, startContext, trimmed);
|
||||||
var endContextForInclusivity = endVersion is not null ? endContext : startContext;
|
var endContextForInclusivity = endVersion is not null ? endContext : startContext;
|
||||||
var fixedInclusive = DetermineEndInclusivity(endContextForInclusivity, trimmed);
|
var fixedInclusive = DetermineEndInclusivity(endContextForInclusivity, trimmed);
|
||||||
|
|
||||||
var hasInclusiveLowerMarker = ContainsAny(prefix, InclusiveStartMarkers) || ContainsAny(startContext, InclusiveStartMarkers);
|
var hasInclusiveLowerMarker = ContainsAny(prefix, InclusiveStartMarkers) || ContainsAny(startContext, InclusiveStartMarkers);
|
||||||
var hasExclusiveLowerMarker = ContainsAny(prefix, ExclusiveStartMarkers) || ContainsAny(startContext, ExclusiveStartMarkers);
|
var hasExclusiveLowerMarker = ContainsAny(prefix, ExclusiveStartMarkers) || ContainsAny(startContext, ExclusiveStartMarkers);
|
||||||
var hasInclusiveUpperMarker = ContainsAny(startContext, InclusiveEndMarkers) || ContainsAny(endContext, InclusiveEndMarkers);
|
var hasInclusiveUpperMarker = ContainsAny(startContext, InclusiveEndMarkers) || ContainsAny(endContext, InclusiveEndMarkers);
|
||||||
var hasExclusiveUpperMarker = ContainsAny(startContext, ExclusiveEndMarkers) || ContainsAny(endContext, ExclusiveEndMarkers);
|
var hasExclusiveUpperMarker = ContainsAny(startContext, ExclusiveEndMarkers) || ContainsAny(endContext, ExclusiveEndMarkers);
|
||||||
var hasUpperMarker = hasInclusiveUpperMarker || hasExclusiveUpperMarker;
|
var hasUpperMarker = hasInclusiveUpperMarker || hasExclusiveUpperMarker;
|
||||||
var hasLowerMarker = hasInclusiveLowerMarker || hasExclusiveLowerMarker;
|
var hasLowerMarker = hasInclusiveLowerMarker || hasExclusiveLowerMarker;
|
||||||
|
|
||||||
var introducedNormalized = TryFormatSemVer(startVersion);
|
var introducedNormalized = TryFormatSemVer(startVersion);
|
||||||
var fixedNormalized = endVersion is not null ? TryFormatSemVer(endVersion) : null;
|
var fixedNormalized = endVersion is not null ? TryFormatSemVer(endVersion) : null;
|
||||||
|
|
||||||
if (introducedNormalized is null || (endVersion is not null && fixedNormalized is null))
|
if (introducedNormalized is null || (endVersion is not null && fixedNormalized is null))
|
||||||
{
|
{
|
||||||
return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
|
return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
|
||||||
}
|
}
|
||||||
|
|
||||||
var coercedUpperOnly = endVersion is null && hasUpperMarker && !hasLowerMarker;
|
var coercedUpperOnly = endVersion is null && hasUpperMarker && !hasLowerMarker;
|
||||||
|
|
||||||
if (coercedUpperOnly)
|
if (coercedUpperOnly)
|
||||||
{
|
{
|
||||||
fixedNormalized = introducedNormalized;
|
fixedNormalized = introducedNormalized;
|
||||||
introducedNormalized = null;
|
introducedNormalized = null;
|
||||||
fixedInclusive = hasInclusiveUpperMarker && !hasExclusiveUpperMarker;
|
fixedInclusive = hasInclusiveUpperMarker && !hasExclusiveUpperMarker;
|
||||||
}
|
}
|
||||||
|
|
||||||
var constraintExpression = BuildConstraintExpression(
|
var constraintExpression = BuildConstraintExpression(
|
||||||
introducedNormalized,
|
introducedNormalized,
|
||||||
introducedInclusive,
|
introducedInclusive,
|
||||||
fixedNormalized,
|
fixedNormalized,
|
||||||
fixedInclusive);
|
fixedInclusive);
|
||||||
|
|
||||||
var vendorExtensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
var vendorExtensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||||
{
|
{
|
||||||
["kisa.range.raw"] = trimmed,
|
["kisa.range.raw"] = trimmed,
|
||||||
["kisa.version.start.raw"] = startVersion
|
["kisa.version.start.raw"] = startVersion
|
||||||
};
|
};
|
||||||
|
|
||||||
if (introducedNormalized is not null)
|
if (introducedNormalized is not null)
|
||||||
{
|
{
|
||||||
vendorExtensions["kisa.version.start.normalized"] = introducedNormalized;
|
vendorExtensions["kisa.version.start.normalized"] = introducedNormalized;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(prefix))
|
if (!string.IsNullOrWhiteSpace(prefix))
|
||||||
{
|
{
|
||||||
vendorExtensions["kisa.range.prefix"] = prefix;
|
vendorExtensions["kisa.range.prefix"] = prefix;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (coercedUpperOnly)
|
if (coercedUpperOnly)
|
||||||
{
|
{
|
||||||
vendorExtensions["kisa.version.end.raw"] = startVersion;
|
vendorExtensions["kisa.version.end.raw"] = startVersion;
|
||||||
vendorExtensions["kisa.version.end.normalized"] = fixedNormalized!;
|
vendorExtensions["kisa.version.end.normalized"] = fixedNormalized!;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (endVersion is not null)
|
if (endVersion is not null)
|
||||||
{
|
{
|
||||||
vendorExtensions["kisa.version.end.raw"] = endVersion;
|
vendorExtensions["kisa.version.end.raw"] = endVersion;
|
||||||
vendorExtensions["kisa.version.end.normalized"] = fixedNormalized!;
|
vendorExtensions["kisa.version.end.normalized"] = fixedNormalized!;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(startContext))
|
if (!string.IsNullOrWhiteSpace(startContext))
|
||||||
{
|
{
|
||||||
vendorExtensions["kisa.range.start.context"] = startContext;
|
vendorExtensions["kisa.range.start.context"] = startContext;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(endContext))
|
if (!string.IsNullOrWhiteSpace(endContext))
|
||||||
{
|
{
|
||||||
vendorExtensions["kisa.range.end.context"] = endContext;
|
vendorExtensions["kisa.range.end.context"] = endContext;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(constraintExpression))
|
if (!string.IsNullOrWhiteSpace(constraintExpression))
|
||||||
{
|
{
|
||||||
vendorExtensions["kisa.range.normalized"] = constraintExpression!;
|
vendorExtensions["kisa.range.normalized"] = constraintExpression!;
|
||||||
}
|
}
|
||||||
|
|
||||||
var semVerPrimitive = new SemVerPrimitive(
|
var semVerPrimitive = new SemVerPrimitive(
|
||||||
Introduced: introducedNormalized,
|
Introduced: introducedNormalized,
|
||||||
IntroducedInclusive: introducedInclusive,
|
IntroducedInclusive: introducedInclusive,
|
||||||
Fixed: fixedNormalized,
|
Fixed: fixedNormalized,
|
||||||
FixedInclusive: fixedInclusive,
|
FixedInclusive: fixedInclusive,
|
||||||
LastAffected: fixedNormalized,
|
LastAffected: fixedNormalized,
|
||||||
LastAffectedInclusive: fixedNormalized is not null ? fixedInclusive : introducedInclusive,
|
LastAffectedInclusive: fixedNormalized is not null ? fixedInclusive : introducedInclusive,
|
||||||
ConstraintExpression: constraintExpression,
|
ConstraintExpression: constraintExpression,
|
||||||
ExactValue: fixedNormalized is null && string.IsNullOrWhiteSpace(constraintExpression) ? introducedNormalized : null);
|
ExactValue: fixedNormalized is null && string.IsNullOrWhiteSpace(constraintExpression) ? introducedNormalized : null);
|
||||||
|
|
||||||
var range = new AffectedVersionRange(
|
var range = new AffectedVersionRange(
|
||||||
rangeKind: "product",
|
rangeKind: "product",
|
||||||
introducedVersion: semVerPrimitive.Introduced,
|
introducedVersion: semVerPrimitive.Introduced,
|
||||||
fixedVersion: semVerPrimitive.Fixed,
|
fixedVersion: semVerPrimitive.Fixed,
|
||||||
lastAffectedVersion: semVerPrimitive.LastAffected,
|
lastAffectedVersion: semVerPrimitive.LastAffected,
|
||||||
rangeExpression: trimmed,
|
rangeExpression: trimmed,
|
||||||
provenance: new AdvisoryProvenance(
|
provenance: new AdvisoryProvenance(
|
||||||
KisaConnectorPlugin.SourceName,
|
KisaConnectorPlugin.SourceName,
|
||||||
"package-range",
|
"package-range",
|
||||||
provenanceValue,
|
provenanceValue,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
new[] { ProvenanceFieldMasks.VersionRanges }),
|
new[] { ProvenanceFieldMasks.VersionRanges }),
|
||||||
primitives: new RangePrimitives(semVerPrimitive, null, null, vendorExtensions));
|
primitives: new RangePrimitives(semVerPrimitive, null, null, vendorExtensions));
|
||||||
|
|
||||||
var normalizedRule = semVerPrimitive.ToNormalizedVersionRule(provenanceValue);
|
var normalizedRule = semVerPrimitive.ToNormalizedVersionRule(provenanceValue);
|
||||||
return (range, normalizedRule);
|
return (range, normalizedRule);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static AffectedVersionRange CreateFallbackRange(string raw, string provenanceValue, DateTimeOffset recordedAt)
|
private static AffectedVersionRange CreateFallbackRange(string raw, string provenanceValue, DateTimeOffset recordedAt)
|
||||||
{
|
{
|
||||||
var vendorExtensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
var vendorExtensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||||
if (!string.IsNullOrWhiteSpace(raw))
|
if (!string.IsNullOrWhiteSpace(raw))
|
||||||
{
|
{
|
||||||
vendorExtensions["kisa.range.raw"] = raw.Trim();
|
vendorExtensions["kisa.range.raw"] = raw.Trim();
|
||||||
}
|
}
|
||||||
|
|
||||||
return new AffectedVersionRange(
|
return new AffectedVersionRange(
|
||||||
rangeKind: "string",
|
rangeKind: "string",
|
||||||
introducedVersion: null,
|
introducedVersion: null,
|
||||||
fixedVersion: null,
|
fixedVersion: null,
|
||||||
lastAffectedVersion: null,
|
lastAffectedVersion: null,
|
||||||
rangeExpression: raw,
|
rangeExpression: raw,
|
||||||
provenance: new AdvisoryProvenance(
|
provenance: new AdvisoryProvenance(
|
||||||
KisaConnectorPlugin.SourceName,
|
KisaConnectorPlugin.SourceName,
|
||||||
"package-range",
|
"package-range",
|
||||||
provenanceValue,
|
provenanceValue,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
new[] { ProvenanceFieldMasks.VersionRanges }),
|
new[] { ProvenanceFieldMasks.VersionRanges }),
|
||||||
primitives: new RangePrimitives(null, null, null, vendorExtensions));
|
primitives: new RangePrimitives(null, null, null, vendorExtensions));
|
||||||
}
|
}
|
||||||
|
|
||||||
private static string ExtractSpan(string source, int start, int end)
|
private static string ExtractSpan(string source, int start, int end)
|
||||||
{
|
{
|
||||||
if (start >= end || start >= source.Length)
|
if (start >= end || start >= source.Length)
|
||||||
{
|
{
|
||||||
return string.Empty;
|
return string.Empty;
|
||||||
}
|
}
|
||||||
|
|
||||||
end = Math.Min(end, source.Length);
|
end = Math.Min(end, source.Length);
|
||||||
return source[start..end];
|
return source[start..end];
|
||||||
}
|
}
|
||||||
|
|
||||||
private static string? TryFormatSemVer(string version)
|
private static string? TryFormatSemVer(string version)
|
||||||
{
|
{
|
||||||
var segments = version.Split('.', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
|
var segments = version.Split('.', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
|
||||||
if (segments.Length == 0)
|
if (segments.Length == 0)
|
||||||
{
|
{
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!TryParseInt(segments[0], out var major))
|
if (!TryParseInt(segments[0], out var major))
|
||||||
{
|
{
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
var minor = segments.Length > 1 && TryParseInt(segments[1], out var minorValue) ? minorValue : 0;
|
var minor = segments.Length > 1 && TryParseInt(segments[1], out var minorValue) ? minorValue : 0;
|
||||||
var patch = segments.Length > 2 && TryParseInt(segments[2], out var patchValue) ? patchValue : 0;
|
var patch = segments.Length > 2 && TryParseInt(segments[2], out var patchValue) ? patchValue : 0;
|
||||||
var baseVersion = $"{major}.{minor}.{patch}";
|
var baseVersion = $"{major}.{minor}.{patch}";
|
||||||
|
|
||||||
if (segments.Length <= 3)
|
if (segments.Length <= 3)
|
||||||
{
|
{
|
||||||
return baseVersion;
|
return baseVersion;
|
||||||
}
|
}
|
||||||
|
|
||||||
var extraIdentifiers = segments
|
var extraIdentifiers = segments
|
||||||
.Skip(3)
|
.Skip(3)
|
||||||
.Select(TrimLeadingZeros)
|
.Select(TrimLeadingZeros)
|
||||||
.Where(static part => part.Length > 0)
|
.Where(static part => part.Length > 0)
|
||||||
.ToArray();
|
.ToArray();
|
||||||
|
|
||||||
if (extraIdentifiers.Length == 0)
|
if (extraIdentifiers.Length == 0)
|
||||||
{
|
{
|
||||||
extraIdentifiers = new[] { "0" };
|
extraIdentifiers = new[] { "0" };
|
||||||
}
|
}
|
||||||
|
|
||||||
var allIdentifiers = new[] { "fw" }.Concat(extraIdentifiers);
|
var allIdentifiers = new[] { "fw" }.Concat(extraIdentifiers);
|
||||||
return $"{baseVersion}-{string.Join('.', allIdentifiers)}";
|
return $"{baseVersion}-{string.Join('.', allIdentifiers)}";
|
||||||
}
|
}
|
||||||
|
|
||||||
private static string TrimLeadingZeros(string value)
|
private static string TrimLeadingZeros(string value)
|
||||||
{
|
{
|
||||||
var trimmed = value.TrimStart('0');
|
var trimmed = value.TrimStart('0');
|
||||||
return trimmed.Length == 0 ? "0" : trimmed;
|
return trimmed.Length == 0 ? "0" : trimmed;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static bool TryParseInt(string value, out int result)
|
private static bool TryParseInt(string value, out int result)
|
||||||
=> int.TryParse(value.Trim(), out result);
|
=> int.TryParse(value.Trim(), out result);
|
||||||
|
|
||||||
private static bool DetermineStartInclusivity(string prefix, string context, string fullSegment)
|
private static bool DetermineStartInclusivity(string prefix, string context, string fullSegment)
|
||||||
{
|
{
|
||||||
if (ContainsAny(prefix, ExclusiveStartMarkers) || ContainsAny(context, ExclusiveStartMarkers))
|
if (ContainsAny(prefix, ExclusiveStartMarkers) || ContainsAny(context, ExclusiveStartMarkers))
|
||||||
{
|
{
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fullSegment.Contains('~', StringComparison.Ordinal))
|
if (fullSegment.Contains('~', StringComparison.Ordinal))
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ContainsAny(prefix, InclusiveStartMarkers) || ContainsAny(context, InclusiveStartMarkers))
|
if (ContainsAny(prefix, InclusiveStartMarkers) || ContainsAny(context, InclusiveStartMarkers))
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static bool DetermineEndInclusivity(string context, string fullSegment)
|
private static bool DetermineEndInclusivity(string context, string fullSegment)
|
||||||
{
|
{
|
||||||
if (string.IsNullOrWhiteSpace(context))
|
if (string.IsNullOrWhiteSpace(context))
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ContainsAny(context, ExclusiveEndMarkers))
|
if (ContainsAny(context, ExclusiveEndMarkers))
|
||||||
{
|
{
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fullSegment.Contains('~', StringComparison.Ordinal))
|
if (fullSegment.Contains('~', StringComparison.Ordinal))
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ContainsAny(context, InclusiveEndMarkers))
|
if (ContainsAny(context, InclusiveEndMarkers))
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static string? BuildConstraintExpression(
|
private static string? BuildConstraintExpression(
|
||||||
string? introduced,
|
string? introduced,
|
||||||
bool introducedInclusive,
|
bool introducedInclusive,
|
||||||
string? fixedVersion,
|
string? fixedVersion,
|
||||||
bool fixedInclusive)
|
bool fixedInclusive)
|
||||||
{
|
{
|
||||||
var segments = new List<string>(capacity: 2);
|
var segments = new List<string>(capacity: 2);
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(introduced))
|
if (!string.IsNullOrWhiteSpace(introduced))
|
||||||
{
|
{
|
||||||
segments.Add($"{(introducedInclusive ? ">=" : ">")} {introduced}");
|
segments.Add($"{(introducedInclusive ? ">=" : ">")} {introduced}");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrWhiteSpace(fixedVersion))
|
if (!string.IsNullOrWhiteSpace(fixedVersion))
|
||||||
{
|
{
|
||||||
segments.Add($"{(fixedInclusive ? "<=" : "<")} {fixedVersion}");
|
segments.Add($"{(fixedInclusive ? "<=" : "<")} {fixedVersion}");
|
||||||
}
|
}
|
||||||
|
|
||||||
return segments.Count == 0 ? null : string.Join(" ", segments);
|
return segments.Count == 0 ? null : string.Join(" ", segments);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static bool ContainsAny(string? value, IReadOnlyCollection<string> markers)
|
private static bool ContainsAny(string? value, IReadOnlyCollection<string> markers)
|
||||||
{
|
{
|
||||||
if (string.IsNullOrWhiteSpace(value))
|
if (string.IsNullOrWhiteSpace(value))
|
||||||
{
|
{
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
foreach (var marker in markers)
|
foreach (var marker in markers)
|
||||||
{
|
{
|
||||||
if (value.Contains(marker, StringComparison.Ordinal))
|
if (value.Contains(marker, StringComparison.Ordinal))
|
||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static string CreateSlug(string value)
|
private static string CreateSlug(string value)
|
||||||
{
|
{
|
||||||
if (string.IsNullOrWhiteSpace(value))
|
if (string.IsNullOrWhiteSpace(value))
|
||||||
{
|
{
|
||||||
return "kisa-product";
|
return "kisa-product";
|
||||||
}
|
}
|
||||||
|
|
||||||
Span<char> buffer = stackalloc char[value.Length];
|
Span<char> buffer = stackalloc char[value.Length];
|
||||||
var index = 0;
|
var index = 0;
|
||||||
foreach (var ch in value.ToLowerInvariant())
|
foreach (var ch in value.ToLowerInvariant())
|
||||||
{
|
{
|
||||||
if (char.IsLetterOrDigit(ch))
|
if (char.IsLetterOrDigit(ch))
|
||||||
{
|
{
|
||||||
buffer[index++] = ch;
|
buffer[index++] = ch;
|
||||||
}
|
}
|
||||||
else if (char.IsWhiteSpace(ch) || ch is '-' or '_' or '.' or '/')
|
else if (char.IsWhiteSpace(ch) || ch is '-' or '_' or '.' or '/')
|
||||||
{
|
{
|
||||||
if (index == 0 || buffer[index - 1] == '-')
|
if (index == 0 || buffer[index - 1] == '-')
|
||||||
{
|
{
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
buffer[index++] = '-';
|
buffer[index++] = '-';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (index == 0)
|
if (index == 0)
|
||||||
{
|
{
|
||||||
return "kisa-product";
|
return "kisa-product";
|
||||||
}
|
}
|
||||||
|
|
||||||
var slug = new string(buffer[..index]).Trim('-');
|
var slug = new string(buffer[..index]).Trim('-');
|
||||||
return string.IsNullOrWhiteSpace(slug) ? "kisa-product" : slug;
|
return string.IsNullOrWhiteSpace(slug) ? "kisa-product" : slug;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static readonly Regex VersionPattern = new(@"\d+(?:\.\d+){1,3}", RegexOptions.Compiled);
|
private static readonly Regex VersionPattern = new(@"\d+(?:\.\d+){1,3}", RegexOptions.Compiled);
|
||||||
|
|
||||||
private static readonly string[] InclusiveStartMarkers = { "이상" };
|
private static readonly string[] InclusiveStartMarkers = { "이상" };
|
||||||
private static readonly string[] ExclusiveStartMarkers = { "초과" };
|
private static readonly string[] ExclusiveStartMarkers = { "초과" };
|
||||||
private static readonly string[] InclusiveEndMarkers = { "이하" };
|
private static readonly string[] InclusiveEndMarkers = { "이하" };
|
||||||
private static readonly string[] ExclusiveEndMarkers = { "미만" };
|
private static readonly string[] ExclusiveEndMarkers = { "미만" };
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Kisa.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Kisa.Internal;
|
using StellaOps.Concelier.Connector.Kisa.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Kisa;
|
namespace StellaOps.Concelier.Connector.Kisa;
|
||||||
|
|||||||
@@ -1,14 +1,14 @@
|
|||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Collections.Immutable;
|
using System.Collections.Immutable;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Text;
|
using System.Text;
|
||||||
using System.Text.Json;
|
using System.Text.Json;
|
||||||
using NuGet.Versioning;
|
using NuGet.Versioning;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Normalization.Identifiers;
|
using StellaOps.Concelier.Normalization.Identifiers;
|
||||||
using StellaOps.Concelier.Normalization.Cvss;
|
using StellaOps.Concelier.Normalization.Cvss;
|
||||||
using StellaOps.Concelier.Normalization.Text;
|
using StellaOps.Concelier.Normalization.Text;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Nvd.Internal;
|
namespace StellaOps.Concelier.Connector.Nvd.Internal;
|
||||||
|
|
||||||
@@ -49,30 +49,30 @@ internal static class NvdMapper
|
|||||||
var modified = TryGetDateTime(cve, "lastModified");
|
var modified = TryGetDateTime(cve, "lastModified");
|
||||||
var description = GetNormalizedDescription(cve);
|
var description = GetNormalizedDescription(cve);
|
||||||
|
|
||||||
var weaknessMetadata = GetWeaknessMetadata(cve);
|
var weaknessMetadata = GetWeaknessMetadata(cve);
|
||||||
var references = GetReferences(cve, sourceDocument, recordedAt, weaknessMetadata);
|
var references = GetReferences(cve, sourceDocument, recordedAt, weaknessMetadata);
|
||||||
var affectedPackages = GetAffectedPackages(cve, cveId, sourceDocument, recordedAt);
|
var affectedPackages = GetAffectedPackages(cve, cveId, sourceDocument, recordedAt);
|
||||||
var cvssMetrics = GetCvssMetrics(cve, sourceDocument, recordedAt, out var severity);
|
var cvssMetrics = GetCvssMetrics(cve, sourceDocument, recordedAt, out var severity);
|
||||||
var weaknesses = BuildWeaknesses(weaknessMetadata, recordedAt);
|
var weaknesses = BuildWeaknesses(weaknessMetadata, recordedAt);
|
||||||
var canonicalMetricId = cvssMetrics.Count > 0
|
var canonicalMetricId = cvssMetrics.Count > 0
|
||||||
? $"{cvssMetrics[0].Version}|{cvssMetrics[0].Vector}"
|
? $"{cvssMetrics[0].Version}|{cvssMetrics[0].Vector}"
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
var provenance = new[]
|
var provenance = new[]
|
||||||
{
|
{
|
||||||
new AdvisoryProvenance(
|
new AdvisoryProvenance(
|
||||||
NvdConnectorPlugin.SourceName,
|
NvdConnectorPlugin.SourceName,
|
||||||
"document",
|
"document",
|
||||||
sourceDocument.Uri,
|
sourceDocument.Uri,
|
||||||
sourceDocument.FetchedAt,
|
sourceDocument.FetchedAt,
|
||||||
new[] { ProvenanceFieldMasks.Advisory }),
|
new[] { ProvenanceFieldMasks.Advisory }),
|
||||||
new AdvisoryProvenance(
|
new AdvisoryProvenance(
|
||||||
NvdConnectorPlugin.SourceName,
|
NvdConnectorPlugin.SourceName,
|
||||||
"mapping",
|
"mapping",
|
||||||
string.IsNullOrWhiteSpace(cveId) ? advisoryKey : cveId,
|
string.IsNullOrWhiteSpace(cveId) ? advisoryKey : cveId,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
new[] { ProvenanceFieldMasks.Advisory }),
|
new[] { ProvenanceFieldMasks.Advisory }),
|
||||||
};
|
};
|
||||||
|
|
||||||
var title = string.IsNullOrWhiteSpace(cveId) ? advisoryKey : cveId;
|
var title = string.IsNullOrWhiteSpace(cveId) ? advisoryKey : cveId;
|
||||||
|
|
||||||
@@ -83,24 +83,24 @@ internal static class NvdMapper
|
|||||||
}
|
}
|
||||||
|
|
||||||
aliasCandidates.Add(advisoryKey);
|
aliasCandidates.Add(advisoryKey);
|
||||||
|
|
||||||
var advisory = new Advisory(
|
var advisory = new Advisory(
|
||||||
advisoryKey: advisoryKey,
|
advisoryKey: advisoryKey,
|
||||||
title: title,
|
title: title,
|
||||||
summary: string.IsNullOrEmpty(description.Text) ? null : description.Text,
|
summary: string.IsNullOrEmpty(description.Text) ? null : description.Text,
|
||||||
language: description.Language,
|
language: description.Language,
|
||||||
published: published,
|
published: published,
|
||||||
modified: modified,
|
modified: modified,
|
||||||
severity: severity,
|
severity: severity,
|
||||||
exploitKnown: false,
|
exploitKnown: false,
|
||||||
aliases: aliasCandidates,
|
aliases: aliasCandidates,
|
||||||
references: references,
|
references: references,
|
||||||
affectedPackages: affectedPackages,
|
affectedPackages: affectedPackages,
|
||||||
cvssMetrics: cvssMetrics,
|
cvssMetrics: cvssMetrics,
|
||||||
provenance: provenance,
|
provenance: provenance,
|
||||||
description: string.IsNullOrEmpty(description.Text) ? null : description.Text,
|
description: string.IsNullOrEmpty(description.Text) ? null : description.Text,
|
||||||
cwes: weaknesses,
|
cwes: weaknesses,
|
||||||
canonicalMetricId: canonicalMetricId);
|
canonicalMetricId: canonicalMetricId);
|
||||||
|
|
||||||
advisories.Add(advisory);
|
advisories.Add(advisory);
|
||||||
index++;
|
index++;
|
||||||
@@ -149,22 +149,22 @@ internal static class NvdMapper
|
|||||||
return DateTimeOffset.TryParse(property.GetString(), out var parsed) ? parsed : null;
|
return DateTimeOffset.TryParse(property.GetString(), out var parsed) ? parsed : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<AdvisoryReference> GetReferences(
|
private static IReadOnlyList<AdvisoryReference> GetReferences(
|
||||||
JsonElement cve,
|
JsonElement cve,
|
||||||
DocumentRecord document,
|
DocumentRecord document,
|
||||||
DateTimeOffset recordedAt,
|
DateTimeOffset recordedAt,
|
||||||
IReadOnlyList<WeaknessMetadata> weaknesses)
|
IReadOnlyList<WeaknessMetadata> weaknesses)
|
||||||
{
|
{
|
||||||
var references = new List<AdvisoryReference>();
|
var references = new List<AdvisoryReference>();
|
||||||
if (!cve.TryGetProperty("references", out var referencesElement) || referencesElement.ValueKind != JsonValueKind.Array)
|
if (!cve.TryGetProperty("references", out var referencesElement) || referencesElement.ValueKind != JsonValueKind.Array)
|
||||||
{
|
{
|
||||||
AppendWeaknessReferences(references, weaknesses, recordedAt);
|
AppendWeaknessReferences(references, weaknesses, recordedAt);
|
||||||
return references;
|
return references;
|
||||||
}
|
}
|
||||||
|
|
||||||
foreach (var reference in referencesElement.EnumerateArray())
|
foreach (var reference in referencesElement.EnumerateArray())
|
||||||
{
|
{
|
||||||
if (!reference.TryGetProperty("url", out var urlElement) || urlElement.ValueKind != JsonValueKind.String)
|
if (!reference.TryGetProperty("url", out var urlElement) || urlElement.ValueKind != JsonValueKind.String)
|
||||||
{
|
{
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -187,138 +187,138 @@ internal static class NvdMapper
|
|||||||
kind: kind,
|
kind: kind,
|
||||||
sourceTag: sourceTag,
|
sourceTag: sourceTag,
|
||||||
summary: null,
|
summary: null,
|
||||||
provenance: new AdvisoryProvenance(
|
provenance: new AdvisoryProvenance(
|
||||||
NvdConnectorPlugin.SourceName,
|
NvdConnectorPlugin.SourceName,
|
||||||
"reference",
|
"reference",
|
||||||
url,
|
url,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
new[] { ProvenanceFieldMasks.References })));
|
new[] { ProvenanceFieldMasks.References })));
|
||||||
}
|
}
|
||||||
|
|
||||||
AppendWeaknessReferences(references, weaknesses, recordedAt);
|
|
||||||
return references;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static IReadOnlyList<WeaknessMetadata> GetWeaknessMetadata(JsonElement cve)
|
|
||||||
{
|
|
||||||
if (!cve.TryGetProperty("weaknesses", out var weaknesses) || weaknesses.ValueKind != JsonValueKind.Array)
|
|
||||||
{
|
|
||||||
return Array.Empty<WeaknessMetadata>();
|
|
||||||
}
|
|
||||||
|
|
||||||
var list = new List<WeaknessMetadata>(weaknesses.GetArrayLength());
|
|
||||||
foreach (var weakness in weaknesses.EnumerateArray())
|
|
||||||
{
|
|
||||||
if (!weakness.TryGetProperty("description", out var descriptions) || descriptions.ValueKind != JsonValueKind.Array)
|
|
||||||
{
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
string? cweId = null;
|
|
||||||
string? name = null;
|
|
||||||
|
|
||||||
foreach (var description in descriptions.EnumerateArray())
|
|
||||||
{
|
|
||||||
if (description.ValueKind != JsonValueKind.Object)
|
|
||||||
{
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!description.TryGetProperty("value", out var valueElement) || valueElement.ValueKind != JsonValueKind.String)
|
|
||||||
{
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
var value = valueElement.GetString();
|
|
||||||
if (string.IsNullOrWhiteSpace(value))
|
|
||||||
{
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
var trimmed = value.Trim();
|
|
||||||
if (trimmed.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
|
|
||||||
{
|
|
||||||
cweId ??= trimmed.ToUpperInvariant();
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
name ??= trimmed;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (string.IsNullOrWhiteSpace(cweId))
|
|
||||||
{
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
list.Add(new WeaknessMetadata(cweId, name));
|
|
||||||
}
|
|
||||||
|
|
||||||
return list.Count == 0 ? Array.Empty<WeaknessMetadata>() : list;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static IReadOnlyList<AdvisoryWeakness> BuildWeaknesses(IReadOnlyList<WeaknessMetadata> metadata, DateTimeOffset recordedAt)
|
|
||||||
{
|
|
||||||
if (metadata.Count == 0)
|
|
||||||
{
|
|
||||||
return Array.Empty<AdvisoryWeakness>();
|
|
||||||
}
|
|
||||||
|
|
||||||
var list = new List<AdvisoryWeakness>(metadata.Count);
|
|
||||||
foreach (var entry in metadata)
|
|
||||||
{
|
|
||||||
var provenance = new AdvisoryProvenance(
|
|
||||||
NvdConnectorPlugin.SourceName,
|
|
||||||
"weakness",
|
|
||||||
entry.CweId,
|
|
||||||
recordedAt,
|
|
||||||
new[] { ProvenanceFieldMasks.Weaknesses });
|
|
||||||
|
|
||||||
var provenanceArray = ImmutableArray.Create(provenance);
|
|
||||||
list.Add(new AdvisoryWeakness(
|
|
||||||
taxonomy: "cwe",
|
|
||||||
identifier: entry.CweId,
|
|
||||||
name: entry.Name,
|
|
||||||
uri: BuildCweUrl(entry.CweId),
|
|
||||||
provenance: provenanceArray));
|
|
||||||
}
|
|
||||||
|
|
||||||
return list;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static void AppendWeaknessReferences(
|
|
||||||
List<AdvisoryReference> references,
|
|
||||||
IReadOnlyList<WeaknessMetadata> weaknesses,
|
|
||||||
DateTimeOffset recordedAt)
|
|
||||||
{
|
|
||||||
if (weaknesses.Count == 0)
|
|
||||||
{
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
var existing = new HashSet<string>(references.Select(reference => reference.Url), StringComparer.OrdinalIgnoreCase);
|
|
||||||
|
|
||||||
foreach (var weakness in weaknesses)
|
|
||||||
{
|
|
||||||
var url = BuildCweUrl(weakness.CweId);
|
|
||||||
if (url is null || existing.Contains(url))
|
|
||||||
{
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
var provenance = new AdvisoryProvenance(
|
|
||||||
NvdConnectorPlugin.SourceName,
|
|
||||||
"reference",
|
|
||||||
url,
|
|
||||||
recordedAt,
|
|
||||||
new[] { ProvenanceFieldMasks.References });
|
|
||||||
|
|
||||||
references.Add(new AdvisoryReference(url, "weakness", weakness.CweId, weakness.Name, provenance));
|
|
||||||
existing.Add(url);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private static IReadOnlyList<AffectedPackage> GetAffectedPackages(JsonElement cve, string? cveId, DocumentRecord document, DateTimeOffset recordedAt)
|
AppendWeaknessReferences(references, weaknesses, recordedAt);
|
||||||
|
return references;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static IReadOnlyList<WeaknessMetadata> GetWeaknessMetadata(JsonElement cve)
|
||||||
|
{
|
||||||
|
if (!cve.TryGetProperty("weaknesses", out var weaknesses) || weaknesses.ValueKind != JsonValueKind.Array)
|
||||||
|
{
|
||||||
|
return Array.Empty<WeaknessMetadata>();
|
||||||
|
}
|
||||||
|
|
||||||
|
var list = new List<WeaknessMetadata>(weaknesses.GetArrayLength());
|
||||||
|
foreach (var weakness in weaknesses.EnumerateArray())
|
||||||
|
{
|
||||||
|
if (!weakness.TryGetProperty("description", out var descriptions) || descriptions.ValueKind != JsonValueKind.Array)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
string? cweId = null;
|
||||||
|
string? name = null;
|
||||||
|
|
||||||
|
foreach (var description in descriptions.EnumerateArray())
|
||||||
|
{
|
||||||
|
if (description.ValueKind != JsonValueKind.Object)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!description.TryGetProperty("value", out var valueElement) || valueElement.ValueKind != JsonValueKind.String)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var value = valueElement.GetString();
|
||||||
|
if (string.IsNullOrWhiteSpace(value))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var trimmed = value.Trim();
|
||||||
|
if (trimmed.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
cweId ??= trimmed.ToUpperInvariant();
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
name ??= trimmed;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (string.IsNullOrWhiteSpace(cweId))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
list.Add(new WeaknessMetadata(cweId, name));
|
||||||
|
}
|
||||||
|
|
||||||
|
return list.Count == 0 ? Array.Empty<WeaknessMetadata>() : list;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static IReadOnlyList<AdvisoryWeakness> BuildWeaknesses(IReadOnlyList<WeaknessMetadata> metadata, DateTimeOffset recordedAt)
|
||||||
|
{
|
||||||
|
if (metadata.Count == 0)
|
||||||
|
{
|
||||||
|
return Array.Empty<AdvisoryWeakness>();
|
||||||
|
}
|
||||||
|
|
||||||
|
var list = new List<AdvisoryWeakness>(metadata.Count);
|
||||||
|
foreach (var entry in metadata)
|
||||||
|
{
|
||||||
|
var provenance = new AdvisoryProvenance(
|
||||||
|
NvdConnectorPlugin.SourceName,
|
||||||
|
"weakness",
|
||||||
|
entry.CweId,
|
||||||
|
recordedAt,
|
||||||
|
new[] { ProvenanceFieldMasks.Weaknesses });
|
||||||
|
|
||||||
|
var provenanceArray = ImmutableArray.Create(provenance);
|
||||||
|
list.Add(new AdvisoryWeakness(
|
||||||
|
taxonomy: "cwe",
|
||||||
|
identifier: entry.CweId,
|
||||||
|
name: entry.Name,
|
||||||
|
uri: BuildCweUrl(entry.CweId),
|
||||||
|
provenance: provenanceArray));
|
||||||
|
}
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void AppendWeaknessReferences(
|
||||||
|
List<AdvisoryReference> references,
|
||||||
|
IReadOnlyList<WeaknessMetadata> weaknesses,
|
||||||
|
DateTimeOffset recordedAt)
|
||||||
|
{
|
||||||
|
if (weaknesses.Count == 0)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var existing = new HashSet<string>(references.Select(reference => reference.Url), StringComparer.OrdinalIgnoreCase);
|
||||||
|
|
||||||
|
foreach (var weakness in weaknesses)
|
||||||
|
{
|
||||||
|
var url = BuildCweUrl(weakness.CweId);
|
||||||
|
if (url is null || existing.Contains(url))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var provenance = new AdvisoryProvenance(
|
||||||
|
NvdConnectorPlugin.SourceName,
|
||||||
|
"reference",
|
||||||
|
url,
|
||||||
|
recordedAt,
|
||||||
|
new[] { ProvenanceFieldMasks.References });
|
||||||
|
|
||||||
|
references.Add(new AdvisoryReference(url, "weakness", weakness.CweId, weakness.Name, provenance));
|
||||||
|
existing.Add(url);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static IReadOnlyList<AffectedPackage> GetAffectedPackages(JsonElement cve, string? cveId, DocumentRecord document, DateTimeOffset recordedAt)
|
||||||
{
|
{
|
||||||
var packages = new Dictionary<string, PackageAccumulator>(StringComparer.Ordinal);
|
var packages = new Dictionary<string, PackageAccumulator>(StringComparer.Ordinal);
|
||||||
if (!cve.TryGetProperty("configurations", out var configurations) || configurations.ValueKind != JsonValueKind.Object)
|
if (!cve.TryGetProperty("configurations", out var configurations) || configurations.ValueKind != JsonValueKind.Object)
|
||||||
@@ -360,12 +360,12 @@ internal static class NvdMapper
|
|||||||
? normalizedCpe
|
? normalizedCpe
|
||||||
: criteria.Trim();
|
: criteria.Trim();
|
||||||
|
|
||||||
var provenance = new AdvisoryProvenance(
|
var provenance = new AdvisoryProvenance(
|
||||||
NvdConnectorPlugin.SourceName,
|
NvdConnectorPlugin.SourceName,
|
||||||
"cpe",
|
"cpe",
|
||||||
document.Uri,
|
document.Uri,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
new[] { ProvenanceFieldMasks.AffectedPackages });
|
new[] { ProvenanceFieldMasks.AffectedPackages });
|
||||||
if (!packages.TryGetValue(identifier, out var accumulator))
|
if (!packages.TryGetValue(identifier, out var accumulator))
|
||||||
{
|
{
|
||||||
accumulator = new PackageAccumulator();
|
accumulator = new PackageAccumulator();
|
||||||
@@ -387,9 +387,9 @@ internal static class NvdMapper
|
|||||||
return Array.Empty<AffectedPackage>();
|
return Array.Empty<AffectedPackage>();
|
||||||
}
|
}
|
||||||
|
|
||||||
return packages
|
return packages
|
||||||
.OrderBy(static kvp => kvp.Key, StringComparer.Ordinal)
|
.OrderBy(static kvp => kvp.Key, StringComparer.Ordinal)
|
||||||
.Select(kvp =>
|
.Select(kvp =>
|
||||||
{
|
{
|
||||||
var ranges = kvp.Value.Ranges.Count == 0
|
var ranges = kvp.Value.Ranges.Count == 0
|
||||||
? Array.Empty<AffectedVersionRange>()
|
? Array.Empty<AffectedVersionRange>()
|
||||||
@@ -404,33 +404,33 @@ internal static class NvdMapper
|
|||||||
.ThenBy(static p => p.RecordedAt.UtcDateTime)
|
.ThenBy(static p => p.RecordedAt.UtcDateTime)
|
||||||
.ToArray();
|
.ToArray();
|
||||||
|
|
||||||
var normalizedNote = string.IsNullOrWhiteSpace(cveId)
|
var normalizedNote = string.IsNullOrWhiteSpace(cveId)
|
||||||
? $"nvd:{document.Id:N}"
|
? $"nvd:{document.Id:N}"
|
||||||
: $"nvd:{cveId}";
|
: $"nvd:{cveId}";
|
||||||
|
|
||||||
var normalizedVersions = new List<NormalizedVersionRule>(ranges.Length);
|
var normalizedVersions = new List<NormalizedVersionRule>(ranges.Length);
|
||||||
foreach (var range in ranges)
|
foreach (var range in ranges)
|
||||||
{
|
{
|
||||||
var rule = range.ToNormalizedVersionRule(normalizedNote);
|
var rule = range.ToNormalizedVersionRule(normalizedNote);
|
||||||
if (rule is not null)
|
if (rule is not null)
|
||||||
{
|
{
|
||||||
normalizedVersions.Add(rule);
|
normalizedVersions.Add(rule);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return new AffectedPackage(
|
return new AffectedPackage(
|
||||||
type: AffectedPackageTypes.Cpe,
|
type: AffectedPackageTypes.Cpe,
|
||||||
identifier: kvp.Key,
|
identifier: kvp.Key,
|
||||||
platform: null,
|
platform: null,
|
||||||
versionRanges: ranges,
|
versionRanges: ranges,
|
||||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||||
provenance: provenance,
|
provenance: provenance,
|
||||||
normalizedVersions: normalizedVersions.Count == 0
|
normalizedVersions: normalizedVersions.Count == 0
|
||||||
? Array.Empty<NormalizedVersionRule>()
|
? Array.Empty<NormalizedVersionRule>()
|
||||||
: normalizedVersions.ToArray());
|
: normalizedVersions.ToArray());
|
||||||
})
|
})
|
||||||
.ToArray();
|
.ToArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static IReadOnlyList<CvssMetric> GetCvssMetrics(JsonElement cve, DocumentRecord document, DateTimeOffset recordedAt, out string? severity)
|
private static IReadOnlyList<CvssMetric> GetCvssMetrics(JsonElement cve, DocumentRecord document, DateTimeOffset recordedAt, out string? severity)
|
||||||
{
|
{
|
||||||
@@ -488,12 +488,12 @@ internal static class NvdMapper
|
|||||||
|
|
||||||
severity ??= normalized.BaseSeverity;
|
severity ??= normalized.BaseSeverity;
|
||||||
|
|
||||||
list.Add(normalized.ToModel(new AdvisoryProvenance(
|
list.Add(normalized.ToModel(new AdvisoryProvenance(
|
||||||
NvdConnectorPlugin.SourceName,
|
NvdConnectorPlugin.SourceName,
|
||||||
"cvss",
|
"cvss",
|
||||||
normalized.Vector,
|
normalized.Vector,
|
||||||
recordedAt,
|
recordedAt,
|
||||||
new[] { ProvenanceFieldMasks.CvssMetrics })));
|
new[] { ProvenanceFieldMasks.CvssMetrics })));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (list.Count > 0)
|
if (list.Count > 0)
|
||||||
@@ -557,186 +557,186 @@ internal static class NvdMapper
|
|||||||
vendorExtensions["version"] = version;
|
vendorExtensions["version"] = version;
|
||||||
}
|
}
|
||||||
|
|
||||||
string? introduced = null;
|
string? introduced = null;
|
||||||
string? fixedVersion = null;
|
string? fixedVersion = null;
|
||||||
string? lastAffected = null;
|
string? lastAffected = null;
|
||||||
string? exactVersion = null;
|
string? exactVersion = null;
|
||||||
var expressionParts = new List<string>();
|
var expressionParts = new List<string>();
|
||||||
|
|
||||||
var introducedInclusive = true;
|
var introducedInclusive = true;
|
||||||
var fixedInclusive = false;
|
var fixedInclusive = false;
|
||||||
var lastInclusive = true;
|
var lastInclusive = true;
|
||||||
|
|
||||||
if (versionStartIncluding is not null)
|
if (versionStartIncluding is not null)
|
||||||
{
|
{
|
||||||
introduced = versionStartIncluding;
|
introduced = versionStartIncluding;
|
||||||
introducedInclusive = true;
|
introducedInclusive = true;
|
||||||
expressionParts.Add($">={versionStartIncluding}");
|
expressionParts.Add($">={versionStartIncluding}");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (versionStartExcluding is not null)
|
if (versionStartExcluding is not null)
|
||||||
{
|
{
|
||||||
if (introduced is null)
|
if (introduced is null)
|
||||||
{
|
{
|
||||||
introduced = versionStartExcluding;
|
introduced = versionStartExcluding;
|
||||||
introducedInclusive = false;
|
introducedInclusive = false;
|
||||||
}
|
}
|
||||||
expressionParts.Add($">{versionStartExcluding}");
|
expressionParts.Add($">{versionStartExcluding}");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (versionEndExcluding is not null)
|
if (versionEndExcluding is not null)
|
||||||
{
|
{
|
||||||
fixedVersion = versionEndExcluding;
|
fixedVersion = versionEndExcluding;
|
||||||
fixedInclusive = false;
|
fixedInclusive = false;
|
||||||
expressionParts.Add($"<{versionEndExcluding}");
|
expressionParts.Add($"<{versionEndExcluding}");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (versionEndIncluding is not null)
|
if (versionEndIncluding is not null)
|
||||||
{
|
{
|
||||||
lastAffected = versionEndIncluding;
|
lastAffected = versionEndIncluding;
|
||||||
lastInclusive = true;
|
lastInclusive = true;
|
||||||
expressionParts.Add($"<={versionEndIncluding}");
|
expressionParts.Add($"<={versionEndIncluding}");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (version is not null)
|
if (version is not null)
|
||||||
{
|
{
|
||||||
introduced = version;
|
introduced = version;
|
||||||
introducedInclusive = true;
|
introducedInclusive = true;
|
||||||
lastAffected = version;
|
lastAffected = version;
|
||||||
lastInclusive = true;
|
lastInclusive = true;
|
||||||
exactVersion = version;
|
exactVersion = version;
|
||||||
expressionParts.Add($"=={version}");
|
expressionParts.Add($"=={version}");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (introduced is null && fixedVersion is null && lastAffected is null && vendorExtensions.Count == 0)
|
if (introduced is null && fixedVersion is null && lastAffected is null && vendorExtensions.Count == 0)
|
||||||
{
|
{
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
var rangeExpression = expressionParts.Count > 0 ? string.Join(' ', expressionParts) : null;
|
var rangeExpression = expressionParts.Count > 0 ? string.Join(' ', expressionParts) : null;
|
||||||
IReadOnlyDictionary<string, string>? extensions = vendorExtensions.Count == 0 ? null : vendorExtensions;
|
IReadOnlyDictionary<string, string>? extensions = vendorExtensions.Count == 0 ? null : vendorExtensions;
|
||||||
|
|
||||||
SemVerPrimitive? semVerPrimitive = null;
|
|
||||||
if (TryBuildSemVerPrimitive(
|
|
||||||
introduced,
|
|
||||||
introducedInclusive,
|
|
||||||
fixedVersion,
|
|
||||||
fixedInclusive,
|
|
||||||
lastAffected,
|
|
||||||
lastInclusive,
|
|
||||||
exactVersion,
|
|
||||||
rangeExpression,
|
|
||||||
out var primitive))
|
|
||||||
{
|
|
||||||
semVerPrimitive = primitive;
|
|
||||||
}
|
|
||||||
|
|
||||||
var primitives = semVerPrimitive is null && extensions is null
|
|
||||||
? null
|
|
||||||
: new RangePrimitives(semVerPrimitive, null, null, extensions);
|
|
||||||
|
|
||||||
var provenanceValue = provenance.Value ?? criteria;
|
|
||||||
var rangeProvenance = new AdvisoryProvenance(
|
|
||||||
provenance.Source,
|
|
||||||
provenance.Kind,
|
|
||||||
provenanceValue,
|
|
||||||
provenance.RecordedAt,
|
|
||||||
new[] { ProvenanceFieldMasks.VersionRanges });
|
|
||||||
|
|
||||||
return new AffectedVersionRange(
|
|
||||||
rangeKind: "cpe",
|
|
||||||
introducedVersion: introduced,
|
|
||||||
fixedVersion: fixedVersion,
|
|
||||||
lastAffectedVersion: lastAffected,
|
|
||||||
rangeExpression: rangeExpression,
|
|
||||||
provenance: rangeProvenance,
|
|
||||||
primitives);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static bool TryBuildSemVerPrimitive(
|
|
||||||
string? introduced,
|
|
||||||
bool introducedInclusive,
|
|
||||||
string? fixedVersion,
|
|
||||||
bool fixedInclusive,
|
|
||||||
string? lastAffected,
|
|
||||||
bool lastInclusive,
|
|
||||||
string? exactVersion,
|
|
||||||
string? constraintExpression,
|
|
||||||
out SemVerPrimitive? primitive)
|
|
||||||
{
|
|
||||||
primitive = null;
|
|
||||||
|
|
||||||
if (!TryNormalizeSemVer(introduced, out var normalizedIntroduced)
|
|
||||||
|| !TryNormalizeSemVer(fixedVersion, out var normalizedFixed)
|
|
||||||
|| !TryNormalizeSemVer(lastAffected, out var normalizedLast)
|
|
||||||
|| !TryNormalizeSemVer(exactVersion, out var normalizedExact))
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (normalizedIntroduced is null && normalizedFixed is null && normalizedLast is null && normalizedExact is null)
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
primitive = new SemVerPrimitive(
|
|
||||||
Introduced: normalizedIntroduced,
|
|
||||||
IntroducedInclusive: normalizedIntroduced is null ? true : introducedInclusive,
|
|
||||||
Fixed: normalizedFixed,
|
|
||||||
FixedInclusive: normalizedFixed is null ? false : fixedInclusive,
|
|
||||||
LastAffected: normalizedLast,
|
|
||||||
LastAffectedInclusive: normalizedLast is null ? false : lastInclusive,
|
|
||||||
ConstraintExpression: constraintExpression,
|
|
||||||
ExactValue: normalizedExact);
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static bool TryNormalizeSemVer(string? value, out string? normalized)
|
|
||||||
{
|
|
||||||
normalized = null;
|
|
||||||
if (string.IsNullOrWhiteSpace(value))
|
|
||||||
{
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
var trimmed = value.Trim();
|
|
||||||
if (trimmed.StartsWith("v", StringComparison.OrdinalIgnoreCase) && trimmed.Length > 1)
|
|
||||||
{
|
|
||||||
trimmed = trimmed[1..];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!NuGetVersion.TryParse(trimmed, out var parsed))
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
normalized = parsed.ToNormalizedString();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static string? BuildCweUrl(string cweId)
|
SemVerPrimitive? semVerPrimitive = null;
|
||||||
{
|
if (TryBuildSemVerPrimitive(
|
||||||
var dashIndex = cweId.IndexOf('-');
|
introduced,
|
||||||
if (dashIndex < 0 || dashIndex == cweId.Length - 1)
|
introducedInclusive,
|
||||||
{
|
fixedVersion,
|
||||||
return null;
|
fixedInclusive,
|
||||||
}
|
lastAffected,
|
||||||
|
lastInclusive,
|
||||||
var digits = new StringBuilder();
|
exactVersion,
|
||||||
for (var i = dashIndex + 1; i < cweId.Length; i++)
|
rangeExpression,
|
||||||
{
|
out var primitive))
|
||||||
var ch = cweId[i];
|
{
|
||||||
if (char.IsDigit(ch))
|
semVerPrimitive = primitive;
|
||||||
{
|
}
|
||||||
digits.Append(ch);
|
|
||||||
}
|
var primitives = semVerPrimitive is null && extensions is null
|
||||||
}
|
? null
|
||||||
|
: new RangePrimitives(semVerPrimitive, null, null, extensions);
|
||||||
return digits.Length == 0 ? null : $"https://cwe.mitre.org/data/definitions/{digits}.html";
|
|
||||||
}
|
var provenanceValue = provenance.Value ?? criteria;
|
||||||
|
var rangeProvenance = new AdvisoryProvenance(
|
||||||
private static string? TryExtractVersionFromCriteria(string criteria)
|
provenance.Source,
|
||||||
|
provenance.Kind,
|
||||||
|
provenanceValue,
|
||||||
|
provenance.RecordedAt,
|
||||||
|
new[] { ProvenanceFieldMasks.VersionRanges });
|
||||||
|
|
||||||
|
return new AffectedVersionRange(
|
||||||
|
rangeKind: "cpe",
|
||||||
|
introducedVersion: introduced,
|
||||||
|
fixedVersion: fixedVersion,
|
||||||
|
lastAffectedVersion: lastAffected,
|
||||||
|
rangeExpression: rangeExpression,
|
||||||
|
provenance: rangeProvenance,
|
||||||
|
primitives);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static bool TryBuildSemVerPrimitive(
|
||||||
|
string? introduced,
|
||||||
|
bool introducedInclusive,
|
||||||
|
string? fixedVersion,
|
||||||
|
bool fixedInclusive,
|
||||||
|
string? lastAffected,
|
||||||
|
bool lastInclusive,
|
||||||
|
string? exactVersion,
|
||||||
|
string? constraintExpression,
|
||||||
|
out SemVerPrimitive? primitive)
|
||||||
|
{
|
||||||
|
primitive = null;
|
||||||
|
|
||||||
|
if (!TryNormalizeSemVer(introduced, out var normalizedIntroduced)
|
||||||
|
|| !TryNormalizeSemVer(fixedVersion, out var normalizedFixed)
|
||||||
|
|| !TryNormalizeSemVer(lastAffected, out var normalizedLast)
|
||||||
|
|| !TryNormalizeSemVer(exactVersion, out var normalizedExact))
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (normalizedIntroduced is null && normalizedFixed is null && normalizedLast is null && normalizedExact is null)
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
primitive = new SemVerPrimitive(
|
||||||
|
Introduced: normalizedIntroduced,
|
||||||
|
IntroducedInclusive: normalizedIntroduced is null ? true : introducedInclusive,
|
||||||
|
Fixed: normalizedFixed,
|
||||||
|
FixedInclusive: normalizedFixed is null ? false : fixedInclusive,
|
||||||
|
LastAffected: normalizedLast,
|
||||||
|
LastAffectedInclusive: normalizedLast is null ? false : lastInclusive,
|
||||||
|
ConstraintExpression: constraintExpression,
|
||||||
|
ExactValue: normalizedExact);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static bool TryNormalizeSemVer(string? value, out string? normalized)
|
||||||
|
{
|
||||||
|
normalized = null;
|
||||||
|
if (string.IsNullOrWhiteSpace(value))
|
||||||
|
{
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
var trimmed = value.Trim();
|
||||||
|
if (trimmed.StartsWith("v", StringComparison.OrdinalIgnoreCase) && trimmed.Length > 1)
|
||||||
|
{
|
||||||
|
trimmed = trimmed[1..];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!NuGetVersion.TryParse(trimmed, out var parsed))
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
normalized = parsed.ToNormalizedString();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string? BuildCweUrl(string cweId)
|
||||||
|
{
|
||||||
|
var dashIndex = cweId.IndexOf('-');
|
||||||
|
if (dashIndex < 0 || dashIndex == cweId.Length - 1)
|
||||||
|
{
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
var digits = new StringBuilder();
|
||||||
|
for (var i = dashIndex + 1; i < cweId.Length; i++)
|
||||||
|
{
|
||||||
|
var ch = cweId[i];
|
||||||
|
if (char.IsDigit(ch))
|
||||||
|
{
|
||||||
|
digits.Append(ch);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return digits.Length == 0 ? null : $"https://cwe.mitre.org/data/definitions/{digits}.html";
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string? TryExtractVersionFromCriteria(string criteria)
|
||||||
{
|
{
|
||||||
if (string.IsNullOrWhiteSpace(criteria))
|
if (string.IsNullOrWhiteSpace(criteria))
|
||||||
{
|
{
|
||||||
@@ -763,12 +763,12 @@ internal static class NvdMapper
|
|||||||
return version;
|
return version;
|
||||||
}
|
}
|
||||||
|
|
||||||
private readonly record struct WeaknessMetadata(string CweId, string? Name);
|
private readonly record struct WeaknessMetadata(string CweId, string? Name);
|
||||||
|
|
||||||
private sealed class PackageAccumulator
|
private sealed class PackageAccumulator
|
||||||
{
|
{
|
||||||
public List<AffectedVersionRange> Ranges { get; } = new();
|
public List<AffectedVersionRange> Ranges { get; } = new();
|
||||||
|
|
||||||
public List<AdvisoryProvenance> Provenance { get; } = new();
|
public List<AdvisoryProvenance> Provenance { get; } = new();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Nvd.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Nvd.Internal;
|
using StellaOps.Concelier.Connector.Nvd.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.ChangeHistory;
|
using StellaOps.Concelier.Storage.Mongo.ChangeHistory;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
using Json.Schema;
|
using Json.Schema;
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -20,8 +20,8 @@ using StellaOps.Concelier.Connector.Osv.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Osv.Internal;
|
using StellaOps.Concelier.Connector.Osv.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
using StellaOps.Cryptography;
|
using StellaOps.Cryptography;
|
||||||
|
|
||||||
@@ -426,7 +426,8 @@ public sealed class OsvConnector : IFeedConnector
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
var gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, bytes, "application/json", null, cancellationToken).ConfigureAwait(false);
|
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||||
|
_ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, bytes, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);
|
||||||
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
|
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
|
||||||
{
|
{
|
||||||
["osv.ecosystem"] = ecosystem,
|
["osv.ecosystem"] = ecosystem,
|
||||||
@@ -434,7 +435,6 @@ public sealed class OsvConnector : IFeedConnector
|
|||||||
["osv.modified"] = modified.ToString("O"),
|
["osv.modified"] = modified.ToString("O"),
|
||||||
};
|
};
|
||||||
|
|
||||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
|
||||||
var record = new DocumentRecord(
|
var record = new DocumentRecord(
|
||||||
recordId,
|
recordId,
|
||||||
SourceName,
|
SourceName,
|
||||||
@@ -447,8 +447,9 @@ public sealed class OsvConnector : IFeedConnector
|
|||||||
Metadata: metadata,
|
Metadata: metadata,
|
||||||
Etag: null,
|
Etag: null,
|
||||||
LastModified: modified,
|
LastModified: modified,
|
||||||
PayloadId: gridFsId,
|
PayloadId: recordId,
|
||||||
ExpiresAt: null);
|
ExpiresAt: null,
|
||||||
|
Payload: bytes);
|
||||||
|
|
||||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||||
pendingDocuments.Add(upserted.Id);
|
pendingDocuments.Add(upserted.Id);
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ using System.Linq;
|
|||||||
using System.Text;
|
using System.Text;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Normalization.Cvss;
|
using StellaOps.Concelier.Normalization.Cvss;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Ru.Bdu.Internal;
|
namespace StellaOps.Concelier.Connector.Ru.Bdu.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Ru.Bdu.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Ru.Bdu.Internal;
|
using StellaOps.Concelier.Connector.Ru.Bdu.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
using StellaOps.Cryptography;
|
using StellaOps.Cryptography;
|
||||||
|
|
||||||
@@ -410,7 +410,8 @@ public sealed class RuBduConnector : IFeedConnector
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
var gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken).ConfigureAwait(false);
|
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||||
|
_ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);
|
||||||
|
|
||||||
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||||
{
|
{
|
||||||
@@ -422,7 +423,6 @@ public sealed class RuBduConnector : IFeedConnector
|
|||||||
metadata["ru-bdu.name"] = dto.Name!;
|
metadata["ru-bdu.name"] = dto.Name!;
|
||||||
}
|
}
|
||||||
|
|
||||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
|
||||||
var record = new DocumentRecord(
|
var record = new DocumentRecord(
|
||||||
recordId,
|
recordId,
|
||||||
SourceName,
|
SourceName,
|
||||||
@@ -435,8 +435,9 @@ public sealed class RuBduConnector : IFeedConnector
|
|||||||
Metadata: metadata,
|
Metadata: metadata,
|
||||||
Etag: null,
|
Etag: null,
|
||||||
LastModified: archiveLastModified ?? dto.IdentifyDate,
|
LastModified: archiveLastModified ?? dto.IdentifyDate,
|
||||||
PayloadId: gridFsId,
|
PayloadId: recordId,
|
||||||
ExpiresAt: null);
|
ExpiresAt: null,
|
||||||
|
Payload: payload);
|
||||||
|
|
||||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||||
pendingDocuments.Add(upserted.Id);
|
pendingDocuments.Add(upserted.Id);
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ using System.Linq;
|
|||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Normalization.Cvss;
|
using StellaOps.Concelier.Normalization.Cvss;
|
||||||
using StellaOps.Concelier.Normalization.SemVer;
|
using StellaOps.Concelier.Normalization.SemVer;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Ru.Nkcki.Internal;
|
namespace StellaOps.Concelier.Connector.Ru.Nkcki.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Ru.Nkcki.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Ru.Nkcki.Internal;
|
using StellaOps.Concelier.Connector.Ru.Nkcki.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
using StellaOps.Cryptography;
|
using StellaOps.Cryptography;
|
||||||
|
|
||||||
@@ -609,7 +609,8 @@ public sealed class RuNkckiConnector : IFeedConnector
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
var gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken).ConfigureAwait(false);
|
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||||
|
_ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);
|
||||||
|
|
||||||
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||||
{
|
{
|
||||||
@@ -627,7 +628,6 @@ public sealed class RuNkckiConnector : IFeedConnector
|
|||||||
metadata["ru-nkcki.mitre_id"] = dto.MitreId!;
|
metadata["ru-nkcki.mitre_id"] = dto.MitreId!;
|
||||||
}
|
}
|
||||||
|
|
||||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
|
||||||
var lastModified = dto.DateUpdated ?? dto.DatePublished;
|
var lastModified = dto.DateUpdated ?? dto.DatePublished;
|
||||||
var record = new DocumentRecord(
|
var record = new DocumentRecord(
|
||||||
recordId,
|
recordId,
|
||||||
@@ -641,8 +641,9 @@ public sealed class RuNkckiConnector : IFeedConnector
|
|||||||
Metadata: metadata,
|
Metadata: metadata,
|
||||||
Etag: null,
|
Etag: null,
|
||||||
LastModified: lastModified,
|
LastModified: lastModified,
|
||||||
PayloadId: gridFsId,
|
PayloadId: recordId,
|
||||||
ExpiresAt: null);
|
ExpiresAt: null,
|
||||||
|
Payload: payload);
|
||||||
|
|
||||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||||
pendingDocuments.Add(upserted.Id);
|
pendingDocuments.Add(upserted.Id);
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
|
|||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
using StellaOps.Cryptography;
|
using StellaOps.Cryptography;
|
||||||
|
|
||||||
@@ -226,7 +226,8 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
|
|||||||
return existing;
|
return existing;
|
||||||
}
|
}
|
||||||
|
|
||||||
var gridFsId = await _rawDocumentStorage.UploadAsync(Source, absolute, payload, contentType, cancellationToken).ConfigureAwait(false);
|
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||||
|
_ = await _rawDocumentStorage.UploadAsync(Source, absolute, payload, contentType, ExpiresAt: null, cancellationToken, recordId).ConfigureAwait(false);
|
||||||
var now = _timeProvider.GetUtcNow();
|
var now = _timeProvider.GetUtcNow();
|
||||||
var sha = ComputeSha256(payload);
|
var sha = ComputeSha256(payload);
|
||||||
|
|
||||||
@@ -240,7 +241,7 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
|
|||||||
};
|
};
|
||||||
|
|
||||||
var record = new DocumentRecord(
|
var record = new DocumentRecord(
|
||||||
existing?.Id ?? Guid.NewGuid(),
|
recordId,
|
||||||
Source,
|
Source,
|
||||||
absolute,
|
absolute,
|
||||||
now,
|
now,
|
||||||
@@ -251,8 +252,9 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
|
|||||||
Metadata: metadata,
|
Metadata: metadata,
|
||||||
Etag: null,
|
Etag: null,
|
||||||
LastModified: generatedAt,
|
LastModified: generatedAt,
|
||||||
PayloadId: gridFsId,
|
PayloadId: recordId,
|
||||||
ExpiresAt: null);
|
ExpiresAt: null,
|
||||||
|
Payload: payload);
|
||||||
|
|
||||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Vndr.Adobe.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Vndr.Adobe.Internal;
|
using StellaOps.Concelier.Connector.Vndr.Adobe.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Vndr.Adobe.Internal;
|
namespace StellaOps.Concelier.Connector.Vndr.Adobe.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Common.Fetch;
|
|||||||
using StellaOps.Concelier.Connector.Vndr.Apple.Internal;
|
using StellaOps.Concelier.Connector.Vndr.Apple.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ using System.Linq;
|
|||||||
using StellaOps.Concelier.Models;
|
using StellaOps.Concelier.Models;
|
||||||
using StellaOps.Concelier.Connector.Common;
|
using StellaOps.Concelier.Connector.Common;
|
||||||
using StellaOps.Concelier.Connector.Common.Packages;
|
using StellaOps.Concelier.Connector.Common.Packages;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Vndr.Apple.Internal;
|
namespace StellaOps.Concelier.Connector.Vndr.Apple.Internal;
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Vndr.Chromium.Configuration;
|
|||||||
using StellaOps.Concelier.Connector.Vndr.Chromium.Internal;
|
using StellaOps.Concelier.Connector.Vndr.Chromium.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
using Json.Schema;
|
using Json.Schema;
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Vndr.Chromium.Internal;
|
namespace StellaOps.Concelier.Connector.Vndr.Chromium.Internal;
|
||||||
|
|
||||||
|
|||||||
@@ -6,15 +6,14 @@ using System.Text.Json.Serialization;
|
|||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using Microsoft.Extensions.Options;
|
using Microsoft.Extensions.Options;
|
||||||
using MongoDB.Bson;
|
using MongoDB.Bson;
|
||||||
using MongoDB.Driver;
|
|
||||||
using StellaOps.Concelier.Connector.Common;
|
using StellaOps.Concelier.Connector.Common;
|
||||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||||
using StellaOps.Concelier.Connector.Vndr.Cisco.Configuration;
|
using StellaOps.Concelier.Connector.Vndr.Cisco.Configuration;
|
||||||
using StellaOps.Concelier.Connector.Vndr.Cisco.Internal;
|
using StellaOps.Concelier.Connector.Vndr.Cisco.Internal;
|
||||||
using StellaOps.Concelier.Storage.Mongo;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
using StellaOps.Concelier.Storage.Mongo;
|
||||||
using StellaOps.Plugin;
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
namespace StellaOps.Concelier.Connector.Vndr.Cisco;
|
namespace StellaOps.Concelier.Connector.Vndr.Cisco;
|
||||||
@@ -138,19 +137,16 @@ public sealed class CiscoConnector : IFeedConnector
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
ObjectId gridFsId;
|
|
||||||
try
|
|
||||||
{
|
|
||||||
gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", cancellationToken).ConfigureAwait(false);
|
|
||||||
}
|
|
||||||
catch (MongoWriteException ex)
|
|
||||||
{
|
|
||||||
_diagnostics.FetchFailure();
|
|
||||||
_logger.LogError(ex, "Failed to upload Cisco advisory {AdvisoryId} to GridFS", advisory.AdvisoryId);
|
|
||||||
throw;
|
|
||||||
}
|
|
||||||
|
|
||||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||||
|
_ = await _rawDocumentStorage.UploadAsync(
|
||||||
|
SourceName,
|
||||||
|
documentUri,
|
||||||
|
payload,
|
||||||
|
"application/json",
|
||||||
|
ExpiresAt: null,
|
||||||
|
cancellationToken,
|
||||||
|
recordId).ConfigureAwait(false);
|
||||||
|
|
||||||
var record = new DocumentRecord(
|
var record = new DocumentRecord(
|
||||||
recordId,
|
recordId,
|
||||||
SourceName,
|
SourceName,
|
||||||
@@ -163,8 +159,9 @@ public sealed class CiscoConnector : IFeedConnector
|
|||||||
BuildMetadata(advisory),
|
BuildMetadata(advisory),
|
||||||
Etag: null,
|
Etag: null,
|
||||||
LastModified: advisory.LastUpdated ?? advisory.FirstPublished ?? now,
|
LastModified: advisory.LastUpdated ?? advisory.FirstPublished ?? now,
|
||||||
PayloadId: gridFsId,
|
PayloadId: recordId,
|
||||||
ExpiresAt: null);
|
ExpiresAt: null,
|
||||||
|
Payload: payload);
|
||||||
|
|
||||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||||
pendingDocuments.Add(upserted.Id);
|
pendingDocuments.Add(upserted.Id);
|
||||||
@@ -221,7 +218,7 @@ public sealed class CiscoConnector : IFeedConnector
|
|||||||
latestModified,
|
latestModified,
|
||||||
latestAdvisoryId);
|
latestAdvisoryId);
|
||||||
}
|
}
|
||||||
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException or MongoException)
|
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException)
|
||||||
{
|
{
|
||||||
_diagnostics.FetchFailure();
|
_diagnostics.FetchFailure();
|
||||||
_logger.LogError(ex, "Cisco fetch failed");
|
_logger.LogError(ex, "Cisco fetch failed");
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user