commit and up

.gitea/workflows/build-test-deploy.yml (new file)
@@ -0,0 +1,297 @@
# .gitea/workflows/build-test-deploy.yml
# Unified CI/CD workflow for git.stella-ops.org (Feedser monorepo)

name: Build Test Deploy

on:
  push:
    branches: [ main ]
    paths:
      - 'src/**'
      - 'docs/**'
      - 'scripts/**'
      - 'Directory.Build.props'
      - 'Directory.Build.targets'
      - 'global.json'
      - '.gitea/workflows/**'
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/**'
      - 'docs/**'
      - 'scripts/**'
      - '.gitea/workflows/**'
  workflow_dispatch:
    inputs:
      force_deploy:
        description: 'Ignore branch checks and run the deploy stage'
        required: false
        default: 'false'
        type: boolean

env:
  DOTNET_VERSION: '10.0.100-rc.1.25451.107'
  BUILD_CONFIGURATION: Release
  CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
  RUNNER_TOOL_CACHE: /toolcache

jobs:
  build-test:
    runs-on: ubuntu-22.04
    environment: ${{ github.event_name == 'pull_request' && 'preview' || 'staging' }}
    env:
      PUBLISH_DIR: ${{ github.workspace }}/artifacts/publish/webservice
      TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore dependencies
        run: dotnet restore src/StellaOps.Feedser.sln

      - name: Build solution (warnings as errors)
        run: dotnet build src/StellaOps.Feedser.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror

      - name: Run unit and integration tests
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          dotnet test src/StellaOps.Feedser.sln \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --logger "trx;LogFileName=stellaops-feedser-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR"

      - name: Publish Feedser web service
        run: |
          mkdir -p "$PUBLISH_DIR"
          dotnet publish src/StellaOps.Feedser.WebService/StellaOps.Feedser.WebService.csproj \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --output "$PUBLISH_DIR"

      - name: Upload published artifacts
        uses: actions/upload-artifact@v4
        with:
          name: feedser-publish
          path: ${{ env.PUBLISH_DIR }}
          if-no-files-found: error
          retention-days: 7

      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: feedser-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 7

  docs:
    runs-on: ubuntu-22.04
    env:
      DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-site
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install documentation dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install markdown pygments

      - name: Render documentation bundle
        run: |
          python scripts/render_docs.py --source docs --output "$DOCS_OUTPUT_DIR" --clean

      - name: Upload documentation artifact
        uses: actions/upload-artifact@v4
        with:
          name: feedser-docs-site
          path: ${{ env.DOCS_OUTPUT_DIR }}
          if-no-files-found: error
          retention-days: 7

  deploy:
    runs-on: ubuntu-22.04
    needs: [build-test, docs]
    if: >-
      needs.build-test.result == 'success' &&
      needs.docs.result == 'success' &&
      (
        (github.event_name == 'push' && github.ref == 'refs/heads/main') ||
        github.event_name == 'workflow_dispatch'
      )
    environment: staging
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            scripts
            .gitea/workflows
          sparse-checkout-cone-mode: true

      - name: Check if deployment should proceed
        id: check-deploy
        run: |
          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            if [ "${{ github.event.inputs.force_deploy }}" = "true" ]; then
              echo "should-deploy=true" >> $GITHUB_OUTPUT
              echo "✅ Manual deployment requested"
            else
              echo "should-deploy=false" >> $GITHUB_OUTPUT
              echo "ℹ️ Manual dispatch without force_deploy=true — skipping"
            fi
          elif [ "${{ github.ref }}" = "refs/heads/main" ]; then
            echo "should-deploy=true" >> $GITHUB_OUTPUT
            echo "✅ Deploying latest main branch build"
          else
            echo "should-deploy=false" >> $GITHUB_OUTPUT
            echo "ℹ️ Deployment restricted to main branch"
          fi

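      # Each setting below resolves through a fallback chain:
      # environment-specific secret → environment-specific variable →
      # generic secret → generic variable. Host, user, and key are
      # required; path and docs-path are optional and gate later steps.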
      - name: Resolve deployment credentials
        id: params
        if: steps.check-deploy.outputs.should-deploy == 'true'
        run: |
          missing=()

          host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
          if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi

          user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
          if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi

          path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
          if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi

          docs_path="${{ secrets.STAGING_DOCS_PATH }}"
          if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi

          key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
          if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi

          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing deployment configuration: ${missing[*]}"
            exit 1
          fi

          key_file="$RUNNER_TEMP/staging_deploy_key"
          printf '%s\n' "$key" > "$key_file"
          chmod 600 "$key_file"

          echo "host=$host" >> $GITHUB_OUTPUT
          echo "user=$user" >> $GITHUB_OUTPUT
          echo "path=$path" >> $GITHUB_OUTPUT
          echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
          echo "key-file=$key_file" >> $GITHUB_OUTPUT

      - name: Download service artifact
        if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs.path != ''
        uses: actions/download-artifact@v4
        with:
          name: feedser-publish
          path: artifacts/service

      - name: Download documentation artifact
        if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs['docs-path'] != ''
        uses: actions/download-artifact@v4
        with:
          name: feedser-docs-site
          path: artifacts/docs

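      # rsync is fetched once per distro release; the .deb files are kept
      # under $CI_CACHE_ROOT/apt so repeat (or offline) runs can install
      # from the cache instead of hitting the mirror.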
      - name: Install rsync
        if: steps.check-deploy.outputs.should-deploy == 'true'
        run: |
          if command -v rsync >/dev/null 2>&1; then
            exit 0
          fi
          CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
          mkdir -p "$CACHE_DIR"
          KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
          DEB_DIR="$CACHE_DIR/$KEY"
          mkdir -p "$DEB_DIR"
          if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
            apt-get update
            apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
          else
            apt-get update
            apt-get download rsync libpopt0
            mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
            dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
          fi

      - name: Deploy service bundle
        if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs.path != ''
        env:
          HOST: ${{ steps.params.outputs.host }}
          USER: ${{ steps.params.outputs.user }}
          TARGET: ${{ steps.params.outputs.path }}
          KEY_FILE: ${{ steps.params.outputs['key-file'] }}
        run: |
          # download-artifact@v4 extracts the artifact contents directly into
          # the requested path, so the bundle lives in artifacts/service.
          SERVICE_DIR="artifacts/service"
          if [ ! -d "$SERVICE_DIR" ] || [ -z "$(ls -A "$SERVICE_DIR")" ]; then
            echo "❌ Service artifact directory missing or empty ($SERVICE_DIR)"
            exit 1
          fi
          echo "🚀 Deploying Feedser web service to $HOST:$TARGET"
          rsync -az --delete \
            -e "ssh -i $KEY_FILE -o StrictHostKeyChecking=no" \
            "$SERVICE_DIR"/ \
            "$USER@$HOST:$TARGET/"

      - name: Deploy documentation bundle
        if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs['docs-path'] != ''
        env:
          HOST: ${{ steps.params.outputs.host }}
          USER: ${{ steps.params.outputs.user }}
          DOCS_TARGET: ${{ steps.params.outputs['docs-path'] }}
          KEY_FILE: ${{ steps.params.outputs['key-file'] }}
        run: |
          # As above, the artifact contents land directly in artifacts/docs.
          DOCS_DIR="artifacts/docs"
          if [ ! -d "$DOCS_DIR" ] || [ -z "$(ls -A "$DOCS_DIR")" ]; then
            echo "❌ Documentation artifact directory missing or empty ($DOCS_DIR)"
            exit 1
          fi
          echo "📚 Deploying documentation bundle to $HOST:$DOCS_TARGET"
          rsync -az --delete \
            -e "ssh -i $KEY_FILE -o StrictHostKeyChecking=no" \
            "$DOCS_DIR"/ \
            "$USER@$HOST:$DOCS_TARGET/"

      - name: Deployment summary
        if: steps.check-deploy.outputs.should-deploy == 'true'
        run: |
          echo "✅ Deployment completed"
          echo "   Host: ${{ steps.params.outputs.host }}"
          echo "   Service path: ${{ steps.params.outputs.path || '(skipped)' }}"
          echo "   Docs path: ${{ steps.params.outputs['docs-path'] || '(skipped)' }}"

      - name: Deployment skipped summary
        if: steps.check-deploy.outputs.should-deploy != 'true'
        run: |
          echo "ℹ️ Deployment stage skipped"
          echo "   Event: ${{ github.event_name }}"
          echo "   Ref: ${{ github.ref }}"

.gitea/workflows/docs.yml (modified)
@@ -1,30 +1,70 @@
-name: Docs CI
-
-on:
-  pull_request:
-    paths:
-      - 'docs/**'
-      - '.github/workflows/docs.yml'
-
-jobs:
-  lint:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Set up Node
-        uses: actions/setup-node@v4
-        with:
-          node-version: '20'
-
-      - name: Install markdown linters
-        run: |
-          npm install -g markdown-link-check remark-cli remark-preset-lint-recommended
-
-      - name: Link check
-        run: |
-          find docs -name '*.md' -print0 | xargs -0 -n1 markdown-link-check -q
-
-      - name: Remark lint
-        run: |
-          remark docs -qf
+# .gitea/workflows/docs.yml
+# Documentation quality checks and preview artefacts
+
+name: Docs CI
+
+on:
+  push:
+    paths:
+      - 'docs/**'
+      - 'scripts/render_docs.py'
+      - '.gitea/workflows/docs.yml'
+  pull_request:
+    paths:
+      - 'docs/**'
+      - 'scripts/render_docs.py'
+      - '.gitea/workflows/docs.yml'
+  workflow_dispatch: {}
+
+env:
+  NODE_VERSION: '20'
+  PYTHON_VERSION: '3.11'
+
+jobs:
+  lint-and-preview:
+    runs-on: ubuntu-22.04
+    env:
+      DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-preview
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: ${{ env.NODE_VERSION }}
+
+      - name: Install markdown linters
+        run: |
+          npm install markdown-link-check remark-cli remark-preset-lint-recommended
+
+      - name: Link check
+        run: |
+          find docs -name '*.md' -print0 | \
+            xargs -0 -n1 -I{} npx markdown-link-check --quiet '{}'
+
+      - name: Remark lint
+        run: |
+          npx remark docs -qf
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+
+      - name: Install documentation dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install markdown pygments
+
+      - name: Render documentation preview bundle
+        run: |
+          python scripts/render_docs.py --source docs --output "$DOCS_OUTPUT_DIR" --clean
+
+      - name: Upload documentation preview
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: feedser-docs-preview
+          path: ${{ env.DOCS_OUTPUT_DIR }}
+          retention-days: 7

.gitea/workflows/promote.yml (new file)
@@ -0,0 +1,206 @@
# .gitea/workflows/promote.yml
# Manual promotion workflow to copy staged artefacts to production

name: Promote Feedser (Manual)

on:
  workflow_dispatch:
    inputs:
      include_docs:
        description: 'Also promote the generated documentation bundle'
        required: false
        default: 'true'
        type: boolean
      tag:
        description: 'Optional build identifier to record in the summary'
        required: false
        default: 'latest'
        type: string

jobs:
  promote:
    runs-on: ubuntu-22.04
    environment: production
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

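      # The two resolution steps below mirror the fallback chain used in
      # build-test-deploy.yml (environment-specific secret → variable →
      # generic secret → variable), once for staging and once for production.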
      - name: Resolve staging credentials
        id: staging
        run: |
          missing=()

          host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
          if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi

          user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
          if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi

          path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
          if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi
          if [ -z "$path" ]; then missing+=("STAGING_DEPLOYMENT_PATH"); fi

          docs_path="${{ secrets.STAGING_DOCS_PATH }}"
          if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi

          key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
          if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi

          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing staging configuration: ${missing[*]}"
            exit 1
          fi

          key_file="$RUNNER_TEMP/staging_key"
          printf '%s\n' "$key" > "$key_file"
          chmod 600 "$key_file"

          echo "host=$host" >> $GITHUB_OUTPUT
          echo "user=$user" >> $GITHUB_OUTPUT
          echo "path=$path" >> $GITHUB_OUTPUT
          echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
          echo "key-file=$key_file" >> $GITHUB_OUTPUT

      - name: Resolve production credentials
        id: production
        run: |
          missing=()

          host="${{ secrets.PRODUCTION_DEPLOYMENT_HOST }}"
          if [ -z "$host" ]; then host="${{ vars.PRODUCTION_DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then missing+=("PRODUCTION_DEPLOYMENT_HOST"); fi

          user="${{ secrets.PRODUCTION_DEPLOYMENT_USERNAME }}"
          if [ -z "$user" ]; then user="${{ vars.PRODUCTION_DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then missing+=("PRODUCTION_DEPLOYMENT_USERNAME"); fi

          path="${{ secrets.PRODUCTION_DEPLOYMENT_PATH }}"
          if [ -z "$path" ]; then path="${{ vars.PRODUCTION_DEPLOYMENT_PATH }}"; fi
          if [ -z "$path" ]; then missing+=("PRODUCTION_DEPLOYMENT_PATH"); fi

          docs_path="${{ secrets.PRODUCTION_DOCS_PATH }}"
          if [ -z "$docs_path" ]; then docs_path="${{ vars.PRODUCTION_DOCS_PATH }}"; fi

          key="${{ secrets.PRODUCTION_DEPLOYMENT_KEY }}"
          if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.PRODUCTION_DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then missing+=("PRODUCTION_DEPLOYMENT_KEY"); fi

          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing production configuration: ${missing[*]}"
            exit 1
          fi

          key_file="$RUNNER_TEMP/production_key"
          printf '%s\n' "$key" > "$key_file"
          chmod 600 "$key_file"

          echo "host=$host" >> $GITHUB_OUTPUT
          echo "user=$user" >> $GITHUB_OUTPUT
          echo "path=$path" >> $GITHUB_OUTPUT
          echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
          echo "key-file=$key_file" >> $GITHUB_OUTPUT

      - name: Install rsync
        run: |
          if command -v rsync >/dev/null 2>&1; then
            exit 0
          fi
          CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
          mkdir -p "$CACHE_DIR"
          KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
          DEB_DIR="$CACHE_DIR/$KEY"
          mkdir -p "$DEB_DIR"
          if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
            apt-get update
            apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
          else
            apt-get update
            apt-get download rsync libpopt0
            mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
            dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
          fi

      - name: Fetch staging artefacts
        id: fetch
        run: |
          staging_root="${{ runner.temp }}/staging"
          mkdir -p "$staging_root/service" "$staging_root/docs"

          echo "📥 Copying service bundle from staging"
          rsync -az --delete \
            -e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs.path }}/" \
            "$staging_root/service/"

          if [ "${{ github.event.inputs.include_docs }}" = "true" ] && [ -n "${{ steps.staging.outputs['docs-path'] }}" ]; then
            echo "📥 Copying documentation bundle from staging"
            rsync -az --delete \
              -e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
              "${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs['docs-path'] }}/" \
              "$staging_root/docs/"
          else
            echo "ℹ️ Documentation promotion skipped"
          fi

          echo "service-dir=$staging_root/service" >> $GITHUB_OUTPUT
          echo "docs-dir=$staging_root/docs" >> $GITHUB_OUTPUT

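      # Snapshot the current production tree into a timestamped sibling
      # directory before overwriting, and prune all but the five newest backups.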
      - name: Backup production service content
        run: |
          ssh -o StrictHostKeyChecking=no -i "${{ steps.production.outputs['key-file'] }}" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}" \
            "set -e; TARGET='${{ steps.production.outputs.path }}'; \
             if [ -d \"\$TARGET\" ]; then \
               parent=\$(dirname \"\$TARGET\"); \
               base=\$(basename \"\$TARGET\"); \
               backup=\"\$parent/\${base}.backup.\$(date +%Y%m%d_%H%M%S)\"; \
               mkdir -p \"\$backup\"; \
               rsync -a --delete \"\$TARGET/\" \"\$backup/\"; \
               ls -dt \"\$parent/\$base\".backup.* 2>/dev/null | tail -n +6 | xargs -r rm -rf; \
               echo \"Backup created at \$backup\"; \
             else \
               echo 'Production service path missing; skipping backup'; \
             fi"

      - name: Publish service to production
        run: |
          rsync -az --delete \
            -e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.fetch.outputs['service-dir'] }}/" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs.path }}/"

      - name: Promote documentation bundle
        if: github.event.inputs.include_docs == 'true' && steps.production.outputs['docs-path'] != ''
        run: |
          rsync -az --delete \
            -e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.fetch.outputs['docs-dir'] }}/" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs['docs-path'] }}/"

      - name: Promotion summary
        run: |
          echo "✅ Promotion completed"
          echo "   Tag: ${{ github.event.inputs.tag }}"
          echo "   Service: ${{ steps.staging.outputs.host }} → ${{ steps.production.outputs.host }}"
          if [ "${{ github.event.inputs.include_docs }}" = "true" ]; then
            echo "   Docs: included"
          else
            echo "   Docs: skipped"
          fi

AGENTS.md (new file)
@@ -0,0 +1,125 @@
# 1) What is StellaOps?

**StellaOps** is an open, sovereign, modular container-security toolkit built for high-speed, offline operation, released under AGPL-3.0-or-later.

It follows an SBOM-first model—analyzing each container layer or ingesting existing CycloneDX/SPDX SBOMs, then enriching them with vulnerability, licence, secret-leak, and misconfiguration data to produce cryptographically signed reports.

- **Vulnerability detection** maps OS and language dependencies to sources such as NVD, GHSA, OSV, and ENISA.
- **Secrets sweep** flags exposed credentials or keys in files or environment variables.
- **Licence audit** identifies potential conflicts, especially copyleft obligations.
- **Misconfiguration checks** detect unsafe Dockerfile patterns (root user, `latest` tags, permissive modes).
- **Provenance** features include in-toto/SLSA attestations signed with cosign for supply-chain trust.

| Guiding principle | What it means for Feedser |
|-------------------|---------------------------|
| **SBOM-first ingest** | Prefer signed SBOMs or reproducible layer diffs before falling back to raw scraping; connectors treat source docs as provenance, never as mutable truth. |
| **Deterministic outputs** | Same inputs yield identical canonical advisories and exported JSON/Trivy DB artefacts; merge hashes and export manifests are reproducible across machines. |
| **Restart-time plug-ins only** | Connector/exporter plug-ins load at service start, keeping runtime sandboxing simple and avoiding hot-patch attack surface. |
| **Sovereign/offline-first** | No mandatory outbound calls beyond allow-listed advisories; Offline Kit bundles Mongo snapshots and exporter artefacts for air-gapped installs. |
| **Operational transparency** | Every stage logs structured events (fetch, parse, merge, export) with correlation IDs so parallel agents can debug without shared state. |
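
The determinism principle is directly checkable. A minimal sketch, assuming a configured local Feedser instance and the CLI verbs shown in §4.1.1 below (the `--output` flag and paths are illustrative, not confirmed options):

```bash
# Export the same canonical dataset twice and compare per-file digests;
# identical inputs must yield byte-identical trees.
set -euo pipefail
stella db export --output /tmp/export-a   # hypothetical flag
stella db export --output /tmp/export-b
diff \
  <(cd /tmp/export-a && find . -type f -exec sha256sum {} + | sort -k2) \
  <(cd /tmp/export-b && find . -type f -exec sha256sum {} + | sort -k2) \
  && echo "deterministic: OK"
```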

Performance: warm scans < 5 s, cold scans < 30 s on a 4 vCPU runner.
Deployment: entirely SaaS-free, suitable for air-gapped or on-prem use through its Offline Kit.
Policy: anonymous users → 33 scans/day; verified → 333/day; approaching 90 % of quota triggers throttling, but never a full block.

More documentation is available in the `./docs/*.md` files. Read `docs/README.md` for an overview of the available documentation, and consult specific documents as your work requires.
|  |  | ||||||
|  | --- | ||||||
|  |  | ||||||
|  | # 3) Practices | ||||||
|  |  | ||||||
|  | ## 3.1) Naming | ||||||
|  | All modules are .NET projects based on .NET 10 (preview). Exclussion is the UI. It is based on Angular | ||||||
|  | All modules are contained by one or more projects. Each project goes in its dedicated folder. Each project starts with StellaOps.<ModuleName>. In case it is common for for all StellaOps modules it is library or plugin and it is named StellaOps.<LibraryOrPlugin>.  | ||||||
|  |  | ||||||
|  | ## 3.2)  Key technologies & integrations | ||||||
|  |  | ||||||
|  | - **Runtime**: .NET 10 (`net10.0`) preview SDK; C# latest preview features. | ||||||
|  | - **Data**: MongoDB (canonical store and job/export state). | ||||||
|  | - **Observability**: structured logs, counters, and (optional) OpenTelemetry traces. | ||||||
|  | - **Ops posture**: offline‑first, allowlist for remote hosts, strict schema validation, gated LLM fallback (only where explicitly configured). | ||||||
|  |  | ||||||

# 4) Modules
StellaOps is composed of several modules, each installable as a Docker container:
- Feedser: aggregates and delivers the vulnerability database.
- Cli: command-line tool that unlocks the full potential: request database operations, install the scanner, request scans, configure the backend.
- Backend: configures and manages scans.
- UI: web interface for the backend (and scanners).
- Agent: installable daemon that performs the scanning.
- Zastava: realtime monitor for allowed (verified) installations.

## 4.1) Feedser
Feedser is a web-service module that aggregates vulnerability information from various sources, parses and normalizes it into a canonical shape, merges and deduplicates the results in one place, and exports them to JSON and Trivy DB. It supports init and resume for every source's fetch, parse/normalize, and merge/deduplication operations, plus export. Export supports delta exports, analogous to full and incremental database backups.

### 4.1.1) Usage
Operations can be started from the command line:

```bash
stella db [fetch|merge|export] [init|resume <point>]
```

or through the API available at https://db.stella-ops.org.
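
For example, a first-time harvest followed by a later incremental catch-up might look like this (the `<point>` token format is whatever the resume checkpoint uses; the sequence is illustrative):

```bash
stella db fetch init            # first-time harvest of all configured sources
stella db merge                 # merge/deduplicate into canonical advisories
stella db export init           # full JSON/Trivy DB export
stella db fetch resume <point>  # later: resume fetching from a checkpoint
stella db export                # delta export of what changed since last run
```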

### 4.1.2) Data flow (end‑to‑end)

1. **Fetch**: connectors request source windows with retries/backoff, persist raw documents with SHA256/ETag metadata.
2. **Parse & Normalize**: validate to DTOs (schema-checked), quarantine failures, normalize to canonical advisories (aliases, affected ranges with NEVRA/EVR/SemVer, references, provenance).
3. **Merge & Deduplicate**: enforce precedence, build/maintain alias graphs, compute deterministic hashes, and eliminate duplicates before persisting to MongoDB.
4. **Export**: JSON tree and/or Trivy DB; package and (optionally) push; write export state.

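Each stage is observable from the web service. A quick liveness and progress check, assuming the `/health` and `/status` endpoints listed in the module specifications (response shape not documented here):

```bash
curl -fsS https://db.stella-ops.org/health   # liveness probe
curl -fsS https://db.stella-ops.org/status   # job/source status snapshot
```
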
### 4.1.3) Architecture
For more information on the architecture, see `./docs/ARCHITECTURE_FEEDSER.md`.

---

### 4.1.4) Glossary (quick)

- **OVAL** — Vendor/distro security definition format; authoritative for OS packages.
- **NEVRA / EVR** — RPM and Debian version semantics for OS packages.
- **PURL / SemVer** — Coordinates and version semantics for OSS ecosystems.
- **KEV** — Known Exploited Vulnerabilities (flag only).

---
# 5) Your role as StellaOps contributor

You act as an information-technology engineer who takes on different roles in pursuit of the StellaOps production implementation.
To work, you must be supplied with a directory containing `AGENTS.md` and `TASKS.md` files; there you will find more information about your role, the scope of your work, and your tasks.

Boundaries:
- You operate only in the working directories you were given, unless a dependency requires you to work in a shared directory; in that case, ask for confirmation first.

Your main characteristics:
- Keep endpoints small, deterministic, and cancellation-aware.
- Improve logs/metrics as per tasks.
- Update `TASKS.md` when moving tasks forward.
- When you are done with all tasks, state explicitly that you are done.
- Impersonate the role described in the working directory's `AGENTS.md`; if no role is given, take the role of the CTO of StellaOps in its early stages.
- You always strive for best practices.
- You always strive for re-usability.
- When in doubt about a design decision, ask first, then act.
- You are autonomous: you work alone for long stretches and achieve as much as possible without stopping for unnecessary questions.
- You operate in the same directories where other agents work. If you need to work in a directory that is a dependency of the provided `AGENTS.md`/`TASKS.md` scope, ask for confirmation first.

## 5.1) Type of contributions

- **BE‑Base (Platform & Pipeline)**  
  Owns DI, plugin host, job scheduler/coordinator, configuration binding, minimal API endpoints, and Mongo bootstrapping.
- **BE‑Conn‑X (Connectors)**  
  One agent per source family (NVD, Red Hat, Ubuntu, Debian, SUSE, GHSA, OSV, PSIRTs, CERTs, KEV, ICS). Implements fetch/parse/map with incremental watermarks.
- **BE‑Merge (Canonical Merge & Dedupe)**  
  Identity graph, precedence policies, canonical JSON serializer, and deterministic hashing (`merge_event`).
- **BE‑Export (JSON & Trivy DB)**  
  Deterministic export trees, Trivy DB packaging, optional ORAS push, and offline bundle.
- **QA (Validation & Observability)**  
  Schema tests, fixture goldens, determinism checks, metrics/logs/traces, e2e reproducibility runs.
- **DevEx/Docs**  
  Maintains this agent framework, templates, and per‑directory guides; assists parallelization and reviews.

## 5.2) Work-in-parallel rules (important)

- **Directory ownership**: Each agent works **only inside its module directory**. Cross‑module edits require a brief handshake in issues/PR description.
- **Scoping**: Use each module’s `AGENTS.md` and `TASKS.md` to plan; autonomous agents must read `src/AGENTS.md` and the module docs before acting.
- **Determinism**: Sort keys, normalize timestamps to UTC ISO‑8601, avoid non‑deterministic data in exports and tests.
- **Status tracking**: Update your module’s `TASKS.md` as you progress (TODO → DOING → DONE/BLOCKED).
- **Tests**: Add/extend fixtures and unit tests per change; never regress determinism or precedence.
- **Test layout**: Use module-specific projects in `StellaOps.Feedser.<Component>.Tests`; shared fixtures/harnesses live in `StellaOps.Feedser.Testing` (see the example run below).
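
For example, to run a single module's test project in isolation (the project path follows the repository layout convention; the exact `.csproj` name is assumed):

```bash
# Run only the Merge module's tests; shared fixtures come in via
# project references to StellaOps.Feedser.Testing.
dotnet test src/StellaOps.Feedser.Merge.Tests/StellaOps.Feedser.Merge.Tests.csproj \
  --configuration Release
```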

---
@@ -140,7 +140,7 @@ intra‑component reach‑ins.
 | `identity`      | Embedded OAuth2/OIDC (OpenIddict 6)                | MIT OpenIddict          | `IIdentityProvider` for LDAP/SAML/JWT gateway     |
 | `pluginloader`  | Discover DLLs, SemVer gate, optional Cosign verify | Reflection + Cosign     | `IPluginLifecycleHook` for telemetry              |
 | `scanning`      | SBOM‑ & image‑flow orchestration; runner pool      | Trivy CLI (default)     | `IScannerRunner` – e.g., Grype, Copacetic, Clair  |
-| `feedmerge`    | Nightly NVD merge & feed enrichment                | Hangfire job            | drop‑in `*.Schedule.dll` for OSV, GHSA, NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU feeds |
+| `feedser` (vulnerability ingest/merge/export service) | Nightly NVD merge & feed enrichment | Hangfire job | drop-in `*.Schedule.dll` for OSV, GHSA, NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU feeds |
 | `tls`           | TLS provider abstraction                           | OpenSSL                 | `ITlsProvider` for custom suites (incl. **SM2**, where law or security requires it) |
 | `reporting`     | Render HTML/PDF reports                            | RazorLight              | `IReportRenderer`                                 |
 | `ui`            | Angular SPA & i18n                                 | Angular {{ angular }}   | new locales via `/locales/{lang}.json`            |
@@ -152,7 +152,7 @@ classDiagram
     class identity
     class pluginloader
     class scanning
-    class feedmerger
+    class feedser
     class tls
     class reporting
     class ui
@@ -163,13 +163,13 @@ classDiagram
     configuration ..> identity : Uses
     identity ..> pluginloader : Authenticates Plugins
     pluginloader ..> scanning : Loads Scanner Runners
-    scanning ..> feedmerger : Triggers Feed Merges
+    scanning ..> feedser : Triggers Feed Merges
     tls ..> AllModules : Provides TLS Abstraction
     reporting ..> ui : Renders Reports for UI
-    scheduling ..> feedmerger : Schedules Nightly Jobs
+    scheduling ..> feedser : Schedules Nightly Jobs
 
     note for scanning "Pluggable: ISScannerRunner<br>e.g., Trivy, Grype"
-    note for feedmerger "Pluggable: *.Schedule.dll<br>e.g., OSV, GHSA Feeds"
+    note for feedser "Pluggable: *.Schedule.dll<br>e.g., OSV, GHSA Feeds"
     note for identity "Pluggable: IIdentityProvider<br>e.g., LDAP, SAML"
     note for reporting "Pluggable: IReportRenderer<br>e.g., Custom PDF"
 ```
@@ -220,30 +220,29 @@ Builder collects layer digests.
 `POST /layers/missing` → Redis SDIFF → missing layer list (< 20 ms).
 SBOM generated only for those layers and uploaded.
 
-### 4.3 Feed Enrichment
+### 4.3 Feedser Harvest & Export
 
 ```mermaid
 sequenceDiagram
-    participant CRON as Nightly Cron (Hangfire)
-    participant FM as Feed Merger
-    participant NVD as NVD Feed
-    participant OSV as OSV Plugin (Optional)
-    participant GHSA as GHSA Plugin (Optional)
-    participant REGC as Regional Catalogue Plugin (Optional)
-    participant REDIS as Redis (Merged Feed Storage)
-    participant UI as Web UI
-
-    CRON->>FM: Trigger at 00:59
-    FM->>NVD: Fetch & Merge NVD Data
-    alt Optional Plugins
-        FM->>OSV: Merge OSV Feed
-        FM->>GHSA: Merge GHSA Feed
-        FM->>REGC: Merge Regional Catalogue Feed
-    end
-    FM->>REDIS: Persist Merged Feed
-    REDIS-->>UI: Update Feed Freshness
-    UI->>UI: Display Green 'Feed Age' Tile
+    participant SCHED as Feedser Scheduler
+    participant CONN as Source Connector Plug-in
+    participant FEEDSER as Feedser Core
+    participant MONGO as MongoDB (Canonical Advisories)
+    participant EXPORT as Exporter (JSON / Trivy DB)
+    participant ART as Artifact Store / Offline Kit
+
+    SCHED->>CONN: Trigger window (init/resume)
+    CONN->>CONN: Fetch source documents + metadata
+    CONN->>FEEDSER: Submit raw document for parsing
+    FEEDSER->>FEEDSER: Parse & normalize to DTO
+    FEEDSER->>FEEDSER: Merge & deduplicate canonical advisory
+    FEEDSER->>MONGO: Write advisory, provenance, merge_event
+    FEEDSER->>EXPORT: Queue export delta request
+    EXPORT->>MONGO: Read canonical snapshot/deltas
+    EXPORT->>EXPORT: Build deterministic JSON & Trivy DB artifacts
+    EXPORT->>ART: Publish artifacts / Offline Kit bundle
+    ART-->>FEEDSER: Record export state + digests
 ```
 
 ### 4.4 Identity & Auth Flow
 
@@ -264,15 +263,15 @@ without Core changes.
 
 | Store          | Primary Use                                   | Why chosen                     |
 |----------------|-----------------------------------------------|--------------------------------|
-| **Redis 7**    | Queue, SBOM cache, Trivy DB mirror            | Sub‑1 ms P99 latency           |
-| **MongoDB**    | History > 180 d, audit logs, policy versions  | Optional; document‑oriented    |
+| **MongoDB**    | Feedser canonical advisories, merge events, export state | Deterministic canonical store with flexible schema |
+| **Redis 7**    | CLI quotas, short-lived job scheduling, layer diff cache | Sub-1 ms P99 latency for hot-path coordination |
 | **Local tmpfs**| Trivy layer cache (`/var/cache/trivy`)        | Keeps disk I/O off hot path    |
 
 ```mermaid
 flowchart LR
     subgraph "Persistence Layers"
-        REDIS[(Redis: Fast Cache/Queues<br>Sub-1ms P99)]
-        MONGO[(MongoDB: Optional Audit/History<br>>180 Days)]
+        REDIS[(Redis: Quotas & Short-lived Queues<br>Sub-1ms P99)]
+        MONGO[(MongoDB: Canonical Advisories<br>Merge Events & Export State)]
         TMPFS[(Local tmpfs: Trivy Layer Cache<br>Low I/O Overhead)]
     end
 
@@ -294,7 +293,7 @@ flowchart LR
 | **S‑1** | Pipeline Scan & Alert     | Stella CLI → SBOM → `/scan` → policy verdict → CI exit code & link to *Scan Detail*                |
 | **S‑2** | Mute Noisy CVE            | Dev toggles **Mute** in UI → rule stored in Redis → next build passes                           |
 | **S‑3** | Nightly Re‑scan           | `SbomNightly.Schedule` re‑queues SBOMs (mask‑filter) → dashboard highlights new Criticals       |
-| **S‑4** | Feed Update Cycle         | `FeedMerge Service` merges feeds → UI *Feed Age* tile turns green                                      |
+| **S‑4** | Feed Update Cycle         | `Feedser (vulnerability ingest/merge/export service)` refreshes feeds → UI *Feed Age* tile turns green |
 | **S‑5** | Custom Report Generation  | Plug‑in registers `IReportRenderer` → `/report/custom/{digest}` → CI downloads artifact         |
 
 ```mermaid
| @@ -1,371 +1,201 @@ | |||||||
| # 8 · Detailed Module Specifications — **Stella Ops** | # 8 · Detailed Module Specifications — **Stella Ops Feedser** | ||||||
| _This document defines every backend/agent module that composes Stella Ops, | _This document describes the Feedser service, its supporting libraries, connectors, exporters, and test assets that live in the OSS repository._ | ||||||
| their public contracts, configuration keys and extension points._ |  | ||||||
|  | --- | ||||||
| --- |  | ||||||
|  | ## 0 Scope   | ||||||
| ## 0 Scope   |  | ||||||
|  | Feedser is the vulnerability ingest/merge/export subsystem of Stella Ops. It | ||||||
| Describes **every .NET, and Angular project** that ships in the OSS Core, the plug‑in contracts they expose, and the runtime artefacts (Dockerfiles, Compose files) used to build and operate them.  Commercial capabilities appear *only* as extension points. | fetches primary advisories, normalizes and deduplicates them into MongoDB, and | ||||||
|  | produces deterministic JSON and Trivy DB exports. This document lists the | ||||||
| --- | projects that make up that workflow, the extension points they expose, and the | ||||||
|  | artefacts they ship. | ||||||
| ## 1 Repository Layout (flat)   |  | ||||||
|  | --- | ||||||
| ~~~text |  | ||||||
| src/ | ## 1 Repository layout (current)   | ||||||
|  │ docker-compose.yml |  | ||||||
|  └─ docker-compose-library/ | ```text | ||||||
|  │    ├─ docker-compose.no-deps.yml | src/ | ||||||
|  │    ├─ docker-compose.dep.redis.yml |  ├─ Directory.Build.props / Directory.Build.targets | ||||||
|  │    ├─ docker-compose.dep.mongo.yml |  ├─ StellaOps.Plugin/ | ||||||
|  │    ├─ docker-compose.dep.proxy.yml |  ├─ StellaOps.Feedser.Core/ | ||||||
|  │    ├─ docker-compose.dep.repository.yml |  ├─ StellaOps.Feedser.Core.Tests/ | ||||||
|  │    └─ docker-compose.local.yml |  ├─ StellaOps.Feedser.Models/ (+ .Tests/) | ||||||
|  └─ backend/ |  ├─ StellaOps.Feedser.Normalization/ (+ .Tests/) | ||||||
|  │    ├─ Dockerfile |  ├─ StellaOps.Feedser.Merge/ (+ .Tests/) | ||||||
|  │    ├─ StellaOps.Web/ |  ├─ StellaOps.Feedser.Storage.Mongo/ (+ .Tests/) | ||||||
|  │    ├─ StellaOps.Common/ |  ├─ StellaOps.Feedser.Exporter.Json/ (+ .Tests/) | ||||||
|  │    ├─ StellaOps.Plugins/ |  ├─ StellaOps.Feedser.Exporter.TrivyDb/ (+ .Tests/) | ||||||
|  │    ├─ StellaOps.Configuration/ |  ├─ StellaOps.Feedser.Source.* / StellaOps.Feedser.Source.*.Tests/ | ||||||
|  │    ├─ StellaOps.Localization/ |  ├─ StellaOps.Feedser.Testing/ | ||||||
|  │    ├─ StellaOps.TlsProvider.OpenSSL/ |  ├─ StellaOps.Feedser.Tests.Shared/ | ||||||
|  │    ├─ StellaOps.TlsProvider.OpenSSL.LegacyRegional/ |  ├─ StellaOps.Feedser.WebService/ (+ .Tests/) | ||||||
|  │    ├─ StellaOps.TlsProvider.Plugin.CustomTlsVendor/ |  ├─ PluginBinaries/ | ||||||
|  │    ├─ StellaOps.VulnerabilityDatabase/ |  └─ StellaOps.Feedser.sln | ||||||
|  │    ├─ StellaOps.Scheduling/ | ``` | ||||||
|  │    ├─ StellaOps.Scheduling.SbomsRescan/ |  | ||||||
|  │    ├─ StellaOps.Scheduling.MutesExpire/ | Each folder is a .NET project (or set of projects) referenced by | ||||||
|  │    ├─ StellaOps.Scheduling.Plugin.CommonCveFeed/ | `StellaOps.Feedser.sln`. Build assets are shared through the root | ||||||
|  │    ├─ StellaOps.Scheduling.Plugin.RegionalCatalogueFeed/ | `Directory.Build.props/targets` so conventions stay consistent. | ||||||
|  │    ├─ StellaOps.Scanners.Trivy/ |  | ||||||
|  │    ├─ StellaOps.Quota/ | --- | ||||||
|  │    ├─ StellaOps.Reporting/ |  | ||||||
|  │    ├─ StellaOps.Notifications/ | ## 2 Shared libraries   | ||||||
|  │    ├─ StellaOps.Notifications.Email/ |  | ||||||
|  │    ├─ StellaOps.Notifications.Plugin.MsTeams/ | | Project | Purpose | Key extension points | | ||||||
|  │    ├─ StellaOps.Authority/ | |---------|---------|----------------------| | ||||||
|  │    ├─ StellaOps.Authority.AD/ | | `StellaOps.Plugin` | Base contracts for connectors, exporters, and DI routines plus Cosign validation helpers. | `IFeedConnector`, `IExporterPlugin`, `IDependencyInjectionRoutine` | | ||||||
|  │    ├─ StellaOps.Cli/ | | `StellaOps.DependencyInjection` | Composable service registrations for Feedser and plug-ins. | `IDependencyInjectionRoutine` discovery | | ||||||
|  │    └─ StellaOps.Agent.Zastava/ | | `StellaOps.Feedser.Testing` | Common fixtures, builders, and harnesses for integration/unit tests. | `FeedserMongoFixture`, test builders | | ||||||
|  └─ frontend/ | | `StellaOps.Feedser.Tests.Shared` | Shared assembly metadata and fixtures wired in via `Directory.Build.props`. | Test assembly references | | ||||||
|       ├─ Dockerfile |  | ||||||
|       ├─ angular.json | --- | ||||||
|       ├─ stella-ops-ui/  |  | ||||||
|       └─ libs/ | ## 3 Core projects   | ||||||
|            ├─ dashboard/ |  | ||||||
|            ├─ scans/ | | Project | Responsibility | Extensibility | | ||||||
|            ├─ settings/ | |---------|----------------|---------------| | ||||||
|            ├─ core-ui/ | | `StellaOps.Feedser.WebService` | ASP.NET Core minimal API hosting Feedser jobs, status endpoints, and scheduler. | DI-based plug-in discovery; configuration binding |  | ||||||
|            └─ i18n/ | | `StellaOps.Feedser.Core` | Job orchestration, connector pipelines, merge workflows, export coordination. | `IFeedConnector`, `IExportJob`, deterministic merge policies | | ||||||
| ~~~ | | `StellaOps.Feedser.Models` | Canonical advisory DTOs and enums persisted in MongoDB and exported artefacts. | Partial classes for source-specific metadata | | ||||||
|  | | `StellaOps.Feedser.Normalization` | Version comparison, CVSS normalization, text utilities for canonicalization. | Helpers consumed by connectors/merge | | ||||||
| All projects are referenced by **`StellaOps.sln`**; `dotnet publish -c Release -p:PublishSingleFile=true` builds a self‑contained **`StellaOps.Api`** binary (plug‑ins load at runtime). | | `StellaOps.Feedser.Merge` | Precedence evaluation, alias graph maintenance, merge-event hashing. | Policy extensions via DI | | ||||||
|  | | `StellaOps.Feedser.Storage.Mongo` | Repository layer for documents, DTOs, advisories, merge events, export state. | Connection string/config via options | | ||||||
| --- | | `StellaOps.Feedser.Exporter.Json` | Deterministic vuln-list JSON export pipeline. | Dependency injection for storage + plugin to host |  | ||||||
|  | | `StellaOps.Feedser.Exporter.TrivyDb` | Builds Trivy DB artefacts from canonical advisories. | Optional ORAS push routines | | ||||||
| ## 2 Shared Libraries   |  | ||||||
|  | ### 3.1 StellaOps.Feedser.WebService   | ||||||
| | Project | Purpose | Key Interfaces | |  | ||||||
| |---------|---------|----------------| | * Hosts minimal API endpoints (`/health`, `/status`, `/jobs`). | ||||||
| | `StellaOps.Common` | Serilog sinks, Redis key helpers, DTO primitives. | `RedisKeys`, `Result<T>` | | * Runs the scheduler that triggers connectors and exporters according to | ||||||
| | `StellaOps.Plugins` | Plug‑in contracts + Cosign verification. | `IStellaPlugin`, `IScannerRunner`, `ITlsProvider`, `IScheduleJob` | |   configured windows. | ||||||
| | `StellaOps.Localization` | Loads JSON locale bundles (backend & Angular). | `ILocaleProvider`, `CultureMiddleware` | | * Applies dependency-injection routines from `PluginBinaries/` at startup only | ||||||
|  |   (restart-time plug-ins). | ||||||
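|  |  | ||||||
|  | A minimal sketch of that hosting shape (the `AddJobScheduler` name comes from the Core tests in this commit; the rest is illustrative, not the service's actual `Program.cs`): | ||||||
|  |  | ||||||
|  | ```csharp | ||||||
|  | var builder = WebApplication.CreateBuilder(args); | ||||||
|  | builder.Services.AddJobScheduler();   // job definitions are contributed by connectors/plug-ins | ||||||
|  | var app = builder.Build(); | ||||||
|  | app.MapGet("/health", () => Results.Ok()); | ||||||
|  | app.MapGet("/status", () => Results.Ok(new { status = "running" })); | ||||||
|  | app.Run(); | ||||||
|  | ``` | ||||||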
| Angular JSON‑bundle workflow matches the official i18n guide. |  | ||||||
|  | ### 3.2 StellaOps.Feedser.Core   | ||||||
| --- |  | ||||||
|  | * Defines job primitives (fetch, parse, map, merge, export) used by connectors. | ||||||
| ## 3 Core Back‑end Projects   | * Coordinates deterministic merge flows and writes `merge_event` documents. | ||||||
|  | * Provides telemetry/log scopes consumed by WebService and exporters. | ||||||
| | Project | Responsibility | Extensibility | |  | ||||||
| |---------|----------------|---------------| | ### 3.3 StellaOps.Feedser.Storage.Mongo   | ||||||
| | **`StellaOps.Api`** | ASP.NET host; source‑gen auto‑wires module endpoints. | Attributes `[MapRestController]`, `[MapHealth]`. | |  | ||||||
| | **`StellaOps.Configuration`** | Bind `appsettings.json` → typed options; `/health`. | `IConfigValidator`. | | * Persists raw documents, DTO records, canonical advisories, aliases, affected | ||||||
| **`StellaOps.Quota`** | Enforces **Free‑tier quota** ({{ quota_token }} scans/day) with early‑warning banner, 5 s soft back‑off, 60 s wait‑wall. | Swappable via `IQuotaStore` (e.g., Postgres). | |   packages, references, merge events, export state, and job leases. | ||||||
| **`StellaOps.JwtIssuer`** *(new)* | Issues, refreshes and validates **Client‑JWTs**. For offline sites it produces a 30‑day token during OUK build and again on every OUK import. | `ITokenSigner` (e.g., HSM) | | * Exposes repository helpers for exporters to stream full/delta snapshots. | ||||||
| | **`StellaOps.TlsProvider.OpenSSL`** | Default TLS suites. | New suites via `ITlsProvider` plug‑in. | |  | ||||||
| **`StellaOps.TlsProvider.OpenSSL.LegacyRegional`** | Legacy and regional TLS cipher suites (e.g., GOST, SM2). | — | | ### 3.4 StellaOps.Feedser.Exporter.*   | ||||||
| | **`StellaOps.VulnerabilityDatabase`** | Feed‑merge CLI writing Redis. | `IAdditionalFeedSource` (OSV, GHSA, regional catalogues). | |  | ||||||
| **`StellaOps.Scheduling`** | Hangfire host inside API. | Jobs via `IScheduleJob`. | | * `Exporter.Json` mirrors the Aqua vuln-list tree with canonical ordering. | ||||||
| | **`StellaOps.Scheduling.SbomsRescan`** | Nightly SBOM re‑scan (`0 2 * * *`). | — | | * `Exporter.TrivyDb` builds Trivy DB Bolt archives and optional OCI bundles. | ||||||
| | **`StellaOps.Scheduling.MutesExpire`** | Daily mute expiry cleanup. | — | | * Both exporters honour deterministic hashing and respect export cursors. | ||||||
| **`StellaOps.Scanners.Trivy`** | Trivy CLI for SBOM & image scans. | Other engines implement `IScannerRunner`. | |  | ||||||
| | **`StellaOps.Reporting`** | RazorLight HTML reports. | `IReportRenderer` for SARIF, CycloneDX. | | --- | ||||||
| | **`StellaOps.Notifications`** | DI contracts for alerts. | `INotifier`. | |  | ||||||
| | **`StellaOps.Notifications.Email`** | SMTP channel. | — | | ## 4 Source connectors   | ||||||
| **`StellaOps.Authority`** | OAuth2 / OIDC via OpenIddict 4. | External IdPs via plug‑in. | | ## 4 Source connectors   | ||||||
| **`StellaOps.Registry`** | Read‑only Docker registry (Registry v2, nginx‑hardened) for agents + SBOM‑builder. | `IRegistryProvider` | | Connectors live under `StellaOps.Feedser.Source.*` and conform to the interfaces | ||||||
| **`StellaOps.MutePolicies`** | Store YAML / Rego policies, validate & version (MongoDB + Redis). | `IPolicyStore` | | in `StellaOps.Plugin`. | ||||||
| **`StellaOps.Attestor`** *(TODO)* | SLSA provenance + Rekor verification (Sigstore Rekor). | `IAttestor` | |  | ||||||
|  | | Family | Project(s) | Notes | | ||||||
| ## 3 · Module Details   | |--------|------------|-------| | ||||||
|  | | Distro PSIRTs | `StellaOps.Feedser.Source.Distro.*` | Debian, Red Hat, SUSE, Ubuntu connectors with NEVRA/EVR helpers. | | ||||||
| > _Only contracts and configuration that may change in the next two quarters are shown; for stable, unchanging keys see the inline XML‑doc in the codebase._ | | Vendor PSIRTs | `StellaOps.Feedser.Source.Vndr.*` | Adobe, Apple, Cisco, Chromium, Microsoft, Oracle, VMware. | | ||||||
|  | | Regional CERTs | `StellaOps.Feedser.Source.Cert*`, `Source.Ru.*`, `Source.Ics.*`, `Source.Kisa` | Provide enrichment metadata while preserving vendor precedence. | | ||||||
| ### 3.1. StellaOps.Configuration   | | OSS ecosystems | `StellaOps.Feedser.Source.Ghsa`, `Source.Osv`, `Source.Cve`, `Source.Kev`, `Source.Acsc`, `Source.Cccs`, `Source.Jvn` | Emit SemVer/alias-rich advisories. | | ||||||
|  |  | ||||||
| * **Responsibility** – parse environment variables or `appsettings.json`; expose `/health`, `/metrics`.   | Each connector ships fixtures/tests under the matching `*.Tests` project. | ||||||
| * **Key extension point** – `IConfigValidator` → validate & normalise custom settings before DI builds.   |  | ||||||
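|  |  | ||||||
| A hypothetical validator, assuming `IConfigValidator` exposes a single validate hook (the exact contract lives in the codebase; the config key is from section 3.11): |  | ||||||
|  |  | ||||||
| ```csharp |  | ||||||
| public sealed class QuotaConfigValidator : IConfigValidator |  | ||||||
| { |  | ||||||
|     public void Validate(IConfiguration config) |  | ||||||
|     { |  | ||||||
|         // Normalise/validate custom settings before the DI container is built. |  | ||||||
|         if (config.GetValue<int>("Quota:FreeTierDailyLimit") <= 0) |  | ||||||
|             throw new InvalidOperationException("Quota:FreeTierDailyLimit must be positive."); |  | ||||||
|     } |  | ||||||
| } |  | ||||||
| ``` |  | ||||||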
|  | --- | ||||||
| ### 3.2. StellaOps.Authority   |  | ||||||
|  | ## 5 · Module Details   | ||||||
| * **Responsibility** – ships with OpenIddict 6, supporting *client‑credentials* and *password* grants.   | > _Focus on the Feedser-specific services that replace the legacy FeedMerge cron._ | ||||||
| * `IIdentityProvider` plug‑in can delegate token issuance to LDAP, SAML, Keycloak …   | > _Focus on the Feedser-specific services that replace the legacy FeedMerge cron._ | ||||||
|  |  | ||||||
|  | ### 5.1 Feedser.Core   | ||||||
| ### 3.3. StellaOps.Scanners   |  | ||||||
|  | * Owns the fetch → parse → merge → export job pipeline and enforces deterministic | ||||||
| * **Primary flow** – SBOM‑first; falls back to image‑unpack if SBOM absent.   |   merge hashes (`merge_event`). | ||||||
| * **Multi‑Format Support** – side‑car `.sbom.type` file; auto‑detects (`SPDXID:` or `bomFormat` heuristics).   | * Provides `JobSchedulerBuilder`, job coordinator, and telemetry scopes consumed | ||||||
| * **Delta Layer Workflow** – `POST /layers/missing` (Redis `SDIFF`) responds < 20 ms; Stella CLI passes only new layers (see the Redis sketch below).   |   by the WebService and exporters. | ||||||
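|  |  | ||||||
|  | For example, a connector job can be registered with explicit metadata (signature as exercised by `JobSchedulerBuilderTests` in this commit; the job type and kind here are made up): | ||||||
|  |  | ||||||
|  | ```csharp | ||||||
|  | builder.AddJob<NvdFetchJob>( | ||||||
|  |     kind: "source:nvd:fetch", | ||||||
|  |     cronExpression: "*/30 * * * *", | ||||||
|  |     timeout: TimeSpan.FromMinutes(10), | ||||||
|  |     leaseDuration: TimeSpan.FromMinutes(2)); | ||||||
|  | ``` | ||||||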
| * **Plug‑in contract evolution**   |  | ||||||
|  | ### 5.2 Feedser.Storage.Mongo   | ||||||
| ```csharp |  | ||||||
| // current | * Bootstrapper creates collections/indexes (documents, dto, advisory, alias, | ||||||
| Task<ScanResult> RunAsync(Stream sbomJson, CancellationToken ct); |   affected, merge_event, export_state, jobs, locks). | ||||||
|  | * Repository APIs surface full/delta advisory reads for exporters, plus | ||||||
| // v2 (preferred) |   SourceState and job lease persistence. | ||||||
| Task<ScanResult> RunAsync(Stream sbom, SbomFormat fmt, CancellationToken ct); |  | ||||||
| ``` | ### 5.3 Feedser.Exporter.Json / Feedser.Exporter.TrivyDb   | ||||||
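|  |  | ||||||
| The delta‑layer check above boils down to a Redis set difference; a sketch using StackExchange.Redis (key names hypothetical): |  | ||||||
|  |  | ||||||
| ```csharp |  | ||||||
| // Which of the submitted layer digests are not yet indexed? |  | ||||||
| var tempKey = (RedisKey)$"layers:req:{Guid.NewGuid():N}"; |  | ||||||
| await db.SetAddAsync(tempKey, digests);                    // digests: RedisValue[] |  | ||||||
| var missing = await db.SetCombineAsync( |  | ||||||
|     SetOperation.Difference, tempKey, "layers:known"); |  | ||||||
| await db.KeyDeleteAsync(tempKey); |  | ||||||
| ``` |  | ||||||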
|  |  | ||||||
| ### 3.4 StellaOps.Registry   |  | ||||||
|  | * Trivy DB exporter shells or native-builds Bolt archives, optionally pushes OCI | ||||||
| * **Purpose** – internal, anonymous **read‑only** Docker registry to avoid GHCR / Docker Hub pulls.   |   layers, and records export cursors. | ||||||
| * **Deployment** – container `stellops.registry:2`; mounted volume `/var/lib/registry`; optional TLS via env vars.   |  | ||||||
|  | ### 5.4 Feedser.WebService   | ||||||
| | Key                              | Default | Notes                           | |  | ||||||
| |----------------------------------|---------|---------------------------------| | * Minimal API host exposing `/health`, `/ready`, `/jobs` and wiring telemetry. | ||||||
| | `REGISTRY_READONLY`              | `true`  | Forces 403 on PUT, 405 on DELETE | | * Loads restart-time plug-ins from `PluginBinaries/`, executes Mongo bootstrap, | ||||||
| | `REGISTRY_STORAGE_DELETE_ENABLED`| `false` | Immutable tags                   | |   and registers built-in connectors/exporters with the scheduler. | ||||||
|  |  | ||||||
| **Plug‑in contract** — `IRegistryProvider.PullAsync(string imageRef)` for mapping to Artifactory, Harbor, etc. | ### 5.5 Plugin host & DI bridge   | ||||||
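|  |  | ||||||
| A hypothetical provider mapping pulls to Harbor (return type assumed; the real contract lives in the codebase): |  | ||||||
|  |  | ||||||
| ```csharp |  | ||||||
| public sealed class HarborRegistryProvider : IRegistryProvider |  | ||||||
| { |  | ||||||
|     private readonly HttpClient http = new() { BaseAddress = new Uri("https://harbor.internal") }; |  | ||||||
|  |  | ||||||
|     public Task<Stream> PullAsync(string imageRef) => |  | ||||||
|         http.GetStreamAsync($"/v2/{imageRef}"); |  | ||||||
| } |  | ||||||
| ``` |  | ||||||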
|  |  | ||||||
| --- | * `StellaOps.Plugin` + `StellaOps.DependencyInjection` provide the contracts and | ||||||
|  |   helper routines for connectors/exporters to integrate with the WebService. | ||||||
| ### 3.5 StellaOps.MutePolicies   |  | ||||||
|  | --- | ||||||
| * **Purpose** – central Policy‑as‑Code store (YAML v1 now, Rego soon).   |  | ||||||
| * **Persistence** – current live rules in Redis (`policies:active`); immutable commits in Mongo `policies_history`.   | ## 6 · Plug-ins & Agents   | ||||||
|  |  | ||||||
| | REST verb | Path                | Description               | | * **Plug-in discovery** – restart-only; the WebService enumerates | ||||||
| |-----------|---------------------|---------------------------| |   `PluginBinaries/` (or configured directories) and executes the contained | ||||||
| | `GET`     | `/policy/export`    | download active YAML      | |   `IDependencyInjectionRoutine` implementations. | ||||||
| | `POST`    | `/policy/import`    | upload YAML / Rego file   | | * **Connector/exporter packages** – each source/exporter can ship as a plug-in | ||||||
| | `POST`    | `/policy/validate`  | lint without persisting   | |   assembly with its own options and HttpClient configuration, keeping the core | ||||||
|  |   image minimal. | ||||||
| **CLI** – Stella CLI gains `--policy-file scan-policy.yaml`. | * **Stella CLI (agent)** – triggers feed-related jobs (`stella db fetch/merge/export`) | ||||||
|  |   and consumes the exported JSON/Trivy DB artefacts, aligning with the SBOM-first | ||||||
| **Plug‑in contract** — `IPolicyStore` for GitOps back‑ends, Vault, etc. |   workflow described in `AGENTS.md`. | ||||||
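|  |  | ||||||
| The surface implied by the REST verbs above is small; a hypothetical shape for custom back‑ends (GitOps, Vault): |  | ||||||
|  |  | ||||||
| ```csharp |  | ||||||
| // Assumed shape; mirrors the /policy export, import and validate endpoints. |  | ||||||
| public interface IPolicyStore |  | ||||||
| { |  | ||||||
|     Task<string> ExportActiveAsync(CancellationToken ct);      // YAML text |  | ||||||
|     Task ImportAsync(string yamlOrRego, CancellationToken ct); |  | ||||||
|     Task<bool> ValidateAsync(string document, CancellationToken ct); |  | ||||||
| } |  | ||||||
| ``` |  | ||||||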
|  | * **Offline Kit** – bundles Feedser plug-ins, JSON tree, Trivy DB, and export | ||||||
| --- |   manifests so air-gapped sites can load the latest vulnerability data without | ||||||
|  |   outbound connectivity. | ||||||
| ### 3.6 StellaOps.Attestor *(Planned – Q1‑2026)*   |  | ||||||
|  | --- | ||||||
| Handles SLSA provenance docs and Rekor log verification. |  | ||||||
|  | ## 7 · Docker & Distribution Artefacts   | ||||||
| ```csharp |  | ||||||
| public interface IAttestor { | | Artefact | Path / Identifier | Notes | | ||||||
|     Task<ProvenanceDoc> CreateAsync(ImageRef img, Sbom sbom); | |----------|-------------------|-------| | ||||||
|     Task<bool> VerifyAsync(ProvenanceDoc doc); | | Feedser WebService image | `containers/feedser/Dockerfile` (built via CI) | Self-contained ASP.NET runtime hosting scheduler/endpoints. | | ||||||
| } | | Plugin bundle | `PluginBinaries/` | Mounted or baked-in assemblies for connectors/exporters. | | ||||||
| ``` | | Offline Kit tarball | Produced by CI release pipeline | Contains JSON tree, Trivy DB OCI layout, export manifest, and plug-ins. | | ||||||
|  | | Local dev compose | `scripts/` + future compose overlays | Developers can run MongoDB, Redis (optional), and WebService locally. | | ||||||
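| Verification would then slot into the scan pipeline roughly as follows (illustrative only): |  | ||||||
|  |  | ||||||
| ```csharp |  | ||||||
| var doc = await attestor.CreateAsync(img, sbom); |  | ||||||
| if (!await attestor.VerifyAsync(doc)) |  | ||||||
|     throw new InvalidOperationException("Rekor verification failed."); |  | ||||||
| ``` |  | ||||||
|  |  | ||||||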
| ### 3.7 StellaOps.FeedMerge.Service |  | ||||||
|  | --- | ||||||
| Nightly Hangfire job (01:00) merges NVD JSON; plug‑ins can provide `ISourceFeed` implementations for OSV, GHSA, NVD, CNNVD, CNVD, ENISA and BDU feeds. |  | ||||||
|  | ## 8 · Performance Budget   | ||||||
| ### 3.8 StellaOps.Tls |  | ||||||
|  | | Scenario | Budget | Source | | ||||||
| Abstracts TLS stack; default OpenSSL; `ITlsProvider` lets enterprises swap in custom suites—**including SM2, where law or security requires it**. | |----------|--------|--------| | ||||||
|  | | Advisory upsert (large advisory) | ≤ 500 ms/advisory | `AdvisoryStorePerformanceTests` (Mongo) | | ||||||
| ### 3.9. StellaOps.Reporting | | Advisory fetch (`GetRecent`) | ≤ 200 ms/advisory | Same performance test harness | | ||||||
|  | | Advisory point lookup (`Find`) | ≤ 200 ms/advisory | Same performance test harness | | ||||||
| HTML / PDF generation via RazorLight; custom renderers via `IReportRenderer`. | | Bulk upsert/fetch cycle | ≤ 28 s total for 30 large advisories | Same performance test harness | | ||||||
|  | | Feedser job scheduling | Deterministic cron execution via `JobSchedulerHostedService` | `StellaOps.Feedser.Core` tests | | ||||||
| ### 3.10 UI | | Trivy DB export | Deterministic digests across runs (ongoing TODO for end-to-end test) | `Exporter.TrivyDb` backlog | | ||||||
|  |  | ||||||
| Angular 17 SPA; lazy‑loaded feature modules, standalone component routes for UI plug‑ins. | Budgets are enforced in automated tests where available; outstanding TODO/DOING | ||||||
|  | items (see task boards) continue tracking gaps such as exporter determinism. | ||||||
| **Zastava agent** – static Go daemon / k8s DaemonSet; watches Docker/CRI‑O events; uploads SBOMs; optional enforce mode via policy plug‑in. |  | ||||||
|  | --- | ||||||
| ### 3.11 StellaOps.Quota — **Free‑Tier Daily Quota Service** |  | ||||||
|  | ## 9 Testing   | ||||||
| **Responsibility** |  | ||||||
|  | * Unit and integration tests live alongside each component (`*.Tests`). | ||||||
| * Track per‑token scan count (`quota:<token>` key in Redis). | * Shared fixtures come from `StellaOps.Feedser.Testing` and | ||||||
| * Reset counters at **00:00 UTC** with key TTL. |   `StellaOps.Feedser.Tests.Shared` (linked via `Directory.Build.props`). | ||||||
| * Inject HTTP headers   | * Integration suites use ephemeral MongoDB and Redis via Testcontainers to | ||||||
|   * `X‑Stella‑Quota‑Remaining`   |   validate end-to-end flow without external dependencies. | ||||||
|   * `X‑Stella‑Reset` |  | ||||||
| * Apply adaptive throttling:   | --- | ||||||
|   * scans below 90 % of {{ quota_token }} → normal service;   |  | ||||||
|   * scans past 90 % of the daily max → UI banner flag `X‑Stella‑Quota‑Warn: true`;   |  | ||||||
|   * scans ≥ {{ quota_token }} → replies are slowed (5 s soft back‑off, then 60 s wait‑wall). |  | ||||||
| * **Offline token awareness** — if `token.valid == false` and |  | ||||||
|   `OfflineMode == true`, return HTTP *451 ComplianceBlock* so that the CLI gives a |  | ||||||
|   clear actionable error. |  | ||||||
| * New config: |  | ||||||
|  |  | ||||||
| ```json |  | ||||||
| "Quota": { |  | ||||||
|   "OfflineGraceDays": 7   // show banner this many days before token expiry |  | ||||||
| } |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| **Interface** |  | ||||||
|  |  | ||||||
| ```csharp |  | ||||||
| public interface IQuotaService |  | ||||||
| { |  | ||||||
|     /// <summary>Evaluates the token's quota; the verdict's IsAllowed flag gates the call.</summary> |  | ||||||
|     Task<QuotaVerdict> CheckAsync(string token, CancellationToken ct); |  | ||||||
| } |  | ||||||
|  |  | ||||||
| public readonly record struct QuotaVerdict( |  | ||||||
|     bool IsAllowed, |  | ||||||
|     int Remaining, |  | ||||||
|     DateTimeOffset ResetUtc, |  | ||||||
|     TimeSpan RetryAfter); |  | ||||||
| ``` |  | ||||||
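|  |  | ||||||
| A sketch of middleware consuming the verdict (header names are from this section; the wiring is illustrative): |  | ||||||
|  |  | ||||||
| ```csharp |  | ||||||
| app.Use(async (ctx, next) => |  | ||||||
| { |  | ||||||
|     var token = ctx.User.FindFirst("client_id")?.Value ?? "anonymous"; |  | ||||||
|     var verdict = await quota.CheckAsync(token, ctx.RequestAborted); |  | ||||||
|     ctx.Response.Headers["X-Stella-Quota-Remaining"] = verdict.Remaining.ToString(); |  | ||||||
|     ctx.Response.Headers["X-Stella-Reset"] = verdict.ResetUtc.ToString("O"); |  | ||||||
|     if (!verdict.IsAllowed) |  | ||||||
|         await Task.Delay(verdict.RetryAfter, ctx.RequestAborted);   // soft back-off |  | ||||||
|     await next(); |  | ||||||
| }); |  | ||||||
| ``` |  | ||||||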
|  |  | ||||||
| **Configuration** (`appsettings.json` keys) |  | ||||||
|  |  | ||||||
| ```json |  | ||||||
| "Quota": { |  | ||||||
|   "FreeTierDailyLimit": {{ quota_token }} , |  | ||||||
|   "WarnThreshold": 200, |  | ||||||
|   "SoftRetrySeconds": 5, |  | ||||||
|   "HardRetrySeconds": 60 |  | ||||||
| } |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| **Extensibility** |  | ||||||
|  |  | ||||||
| * Override storage by providing an `IQuotaStore` plug‑in for Postgres or Mongo. |  | ||||||
| * UI plug‑ins can subscribe to SSE `/quota/events` for custom dashboards. |  | ||||||
|  |  | ||||||
| ### 3.12 StellaOps.JwtIssuer *(new)* |  | ||||||
|  |  | ||||||
| | API | Path | Notes | |  | ||||||
| |-----|------|-------| |  | ||||||
| | `POST` | `/token/offline` | Admin‑only. Generates a 30‑day Client‑JWT for air‑gapped clusters; returns a ZIP the admin can copy to the target host. | |  | ||||||
|  |  | ||||||
| *OUK hook* |  | ||||||
|  |  | ||||||
| * The OUK builder calls `JwtIssuer.SignOfflineToken(exp=+30d)`. |  | ||||||
| * It drops `client.jwt` into `ouk/root/`. |  | ||||||
| * The backend OUK importer places the file under `/var/lib/stella/tokens/`. |  | ||||||
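|  |  | ||||||
| In code, the build step might look like this (method name from above; parameter shape assumed): |  | ||||||
|  |  | ||||||
| ```csharp |  | ||||||
| var jwt = jwtIssuer.SignOfflineToken(exp: DateTimeOffset.UtcNow.AddDays(30)); |  | ||||||
| File.WriteAllText(Path.Combine(oukRoot, "client.jwt"), jwt); |  | ||||||
| ``` |  | ||||||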
|  |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| ## 4 · Compose / Helm Snippet (reference) |  | ||||||
|  |  | ||||||
| ```yaml |  | ||||||
| services: |  | ||||||
|   registry: |  | ||||||
|     image: stellops.registry:2 |  | ||||||
|     restart: unless-stopped |  | ||||||
|     environment: |  | ||||||
|       REGISTRY_READONLY: "true" |  | ||||||
|     volumes: |  | ||||||
|       - ./_registry:/var/lib/registry |  | ||||||
|     ports: |  | ||||||
|       - "5000:5000" |  | ||||||
|  |  | ||||||
|   backend: |  | ||||||
|     image: registry.local/stellops/backend:${TAG} |  | ||||||
|     depends_on: [registry, redis] |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| ## 5 Plug‑ins (sign‑required)   |  | ||||||
|  |  | ||||||
| | Plug‑in | Contract | Notes | |  | ||||||
| |---------|----------|-------| |  | ||||||
| | `StellaOps.Notifications.Plugin.MsTeams` | `INotifier` | Sends cards to Teams webhooks. | |  | ||||||
| | `StellaOps.Authority.AD` | `IIdentityProvider` | LDAP/Active‑Directory token issue. | |  | ||||||
| | `StellaOps.Scheduling.Plugin.CommonCveFeed` | `IScheduleJob` | Merges OSV & NVD JSON hourly. | |  | ||||||
| | `StellaOps.Scheduling.Plugin.RegionalCatalogueFeed` | `IScheduleJob` | Imports NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU XML daily. | |  | ||||||
| | `StellaOps.TlsProvider.Plugin.CustomTlsVendor` | `ITlsProvider` | Binds region‑specific shared libs. | |  | ||||||
|  |  | ||||||
| Cosign signatures are mandatory; loader rejects unsigned DLLs when `DisableUnsigned=false`. |  | ||||||
|  |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| ## 6 Agents   |  | ||||||
|  |  | ||||||
| ### 6.1 `StellaOps.Cli`   |  | ||||||
|  |  | ||||||
| Distroless CLI; returns exit code 1 on policy violation, enabling CI blocking. |  | ||||||
| * **Role** – CI helper: build SBOM, call `/scan`, exit non‑zero on high severity.   |  | ||||||
| * **Flags** – `--engine`, `--threshold`, `--registry-pull-token`, `--pdf-out`, `--delta`, `--sbom-type`, `--policy-file`.   |  | ||||||
| * **Auth** – OAuth2 *scanner* scope. |  | ||||||
|  |  | ||||||
| ### 6.2 `StellaOps.Agent.Zastava`   |  | ||||||
|   |  | ||||||
| * **Role** – Passive container inventory → uploads SBOMs via `/agent/sbom`.   |  | ||||||
| * **Modes** – `off`, `inventory` (Core default).   |  | ||||||
| * No kernel driver (unlike Falco). |  | ||||||
|  |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| ## 7 Angular Front‑end   |  | ||||||
|  |  | ||||||
| | Package | Path | Feature | Lazy | |  | ||||||
| |---------|------|---------|------| |  | ||||||
| | **App** | `frontend/stella-ops-ui/` | Shell, auth guards. | — | |  | ||||||
| | `dashboard` | `libs/dashboard/` | Live metrics tiles. | ✔ | |  | ||||||
| | `scans` | `libs/scans/` | List, detail, mute, diff. | ✔ | |  | ||||||
| | `settings` | `libs/settings/` | Feed cron, workers, TLS switch. | ✔ | |  | ||||||
| | `core-ui` | `libs/core-ui/` | Tailwind components. | — | |  | ||||||
| | `i18n` | `libs/i18n/` | Runtime locale switch, pipe. | — | |  | ||||||
|  |  | ||||||
| Lazy loading of workspace libs follows Nx/Angular guidance. |  | ||||||
|  |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| ## 8 Docker Artefacts   |  | ||||||
|  |  | ||||||
| ### 8.1 Dockerfiles   |  | ||||||
|  |  | ||||||
| * **`backend/Dockerfile`** – multi‑stage .NET {{ dotnet }}; single‑file publish; distroless runtime.   |  | ||||||
| * **`frontend/Dockerfile`** – Node 20 build → Nginx static serve.   |  | ||||||
| * Every plug‑in repo may include its own Dockerfile when shipping side‑cars (e.g., custom scanner). |  | ||||||
|  |  | ||||||
| ### 8.2 Compose Stacks   |  | ||||||
|  |  | ||||||
| * **`docker-compose.yml`**   |  | ||||||
|   * Extends the core stack with Redis 7 and Mongo 7 for small on‑prem installs. |  | ||||||
|  |  | ||||||
| * **`docker-compose.no-deps.yml`**   |  | ||||||
|   * backend, frontend, Trivy, Maven proxy.   |  | ||||||
|   * Assumes external Redis & Mongo. |  | ||||||
|  |  | ||||||
| * **`docker-compose.local.yml`**   |  | ||||||
|   * Build images from local source and bring up backend, frontend, Redis, Mongo, Trivy, Maven proxy for dev‑loop. |  | ||||||
|  |  | ||||||
| Docker Compose override precedence matches official docs. |  | ||||||
|  |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| ## 9 Performance Budget   |  | ||||||
|  |  | ||||||
| | Flow | P95 target | Bottleneck | |  | ||||||
| |------|-----------:|-----------| |  | ||||||
| | SBOM fast‑path | ≤ 5 s | Redis queue depth (keep P99 < 1 ms)  | |  | ||||||
| | Image‑unpack | ≤ 10 s | Trivy layer unpack. | |  | ||||||
| | Nightly re‑scan | 80 SBOM/s | Runner CPU. | |  | ||||||
|  |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| ## Change Log   |  | ||||||
|  |  | ||||||
| | Version | Date | Notes | |  | ||||||
| |---------|------|-------| |  | ||||||
| | **v2.2** | 2025‑07‑11 | Flat layout; stella‑ops‑ui naming; Dockerfiles & 3 Compose stacks; agents and localisation library. | |  | ||||||
| | v2.1 | 2025‑07‑11 | First flat‑structure draft. | |  | ||||||
|  |  | ||||||
| *(End of Module Specifications v2.2‑core)* |  | ||||||
|   | |||||||
| @@ -81,7 +81,7 @@ cosign verify \ | |||||||
|  |  | ||||||
| ## 5 · Private‑feed mirrors 🌐 | ## 5 · Private‑feed mirrors 🌐 | ||||||
|  |  | ||||||
| The **FeedMerge** service provides a signed SQLite snapshot merging: | The **Feedser (vulnerability ingest/merge/export service)** provides signed JSON and Trivy DB snapshots that merge: | ||||||
|  |  | ||||||
| * OSV + GHSA | * OSV + GHSA | ||||||
| * (optional) NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU regionals | * (optional) NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU regionals | ||||||
| @@ -98,4 +98,4 @@ We are grateful to the researchers who help keep Stella Ops safe: | |||||||
| | ------- | ------------------ | ------------ | | | ------- | ------------------ | ------------ | | ||||||
| | *empty* | *(your name here)* |              | | | *empty* | *(your name here)* |              | | ||||||
|  |  | ||||||
| --- | --- | ||||||
|   | |||||||
| @@ -20,7 +20,7 @@ open a PR and append it alphabetically.* | |||||||
| | **ADR** | *Architecture Decision Record* – lightweight Markdown file that captures one irreversible design decision. | ADR template lives at `/docs/adr/` | | | **ADR** | *Architecture Decision Record* – lightweight Markdown file that captures one irreversible design decision. | ADR template lives at `/docs/adr/` | | ||||||
| | **AIRE** | *AI Risk Evaluator* – optional Plus/Pro plug‑in that suggests mute rules using an ONNX model. | Commercial feature | | | **AIRE** | *AI Risk Evaluator* – optional Plus/Pro plug‑in that suggests mute rules using an ONNX model. | Commercial feature | | ||||||
| | **Azure‑Pipelines** | CI/CD service in Microsoft Azure DevOps. | Recipe in Pipeline Library | | | **Azure‑Pipelines** | CI/CD service in Microsoft Azure DevOps. | Recipe in Pipeline Library | | ||||||
| | **BDU** | Russian (FSTEC) national vulnerability database: *База данных уязвимостей*. | Merged with NVD by FeedMerge Service | | | **BDU** | Russian (FSTEC) national vulnerability database: *База данных уязвимостей*. | Merged with NVD by Feedser (vulnerability ingest/merge/export service) | | ||||||
| | **BuildKit** | Modern Docker build engine with caching and concurrency. | Needed for layer cache patterns | | | **BuildKit** | Modern Docker build engine with caching and concurrency. | Needed for layer cache patterns | | ||||||
| | **CI** | *Continuous Integration* – automated build/test pipeline. | Stella integrates via CLI | | | **CI** | *Continuous Integration* – automated build/test pipeline. | Stella integrates via CLI | | ||||||
| | **Cosign** | Open‑source Sigstore tool that signs & verifies container images **and files**. | Images & OUK tarballs | | | **Cosign** | Open‑source Sigstore tool that signs & verifies container images **and files**. | Images & OUK tarballs | | ||||||
| @@ -36,7 +36,7 @@ open a PR and append it alphabetically.* | |||||||
| | **Digest (image)** | SHA‑256 hash uniquely identifying a container image or layer. | Pin digests for reproducible builds | | | **Digest (image)** | SHA‑256 hash uniquely identifying a container image or layer. | Pin digests for reproducible builds | | ||||||
| | **Docker‑in‑Docker (DinD)** | Running Docker daemon inside a CI container. | Used in GitHub / GitLab recipes | | | **Docker‑in‑Docker (DinD)** | Running Docker daemon inside a CI container. | Used in GitHub / GitLab recipes | | ||||||
| | **DTO** | *Data Transfer Object* – C# record serialised to JSON. | Schemas in doc 11 | | | **DTO** | *Data Transfer Object* – C# record serialised to JSON. | Schemas in doc 11 | | ||||||
| **FeedMerge service** | Background job that merges OSV, GHSA and NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU XML into Redis. | Cron default `0 1 * * *` | | | **Feedser** | Vulnerability ingest/merge/export service consolidating OSV, GHSA, NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU feeds into the canonical MongoDB store and export artifacts. | Cron default `0 1 * * *` | | ||||||
| | **FSTEC** | Russian regulator issuing SOBIT certificates. | Pro GA target | | | **FSTEC** | Russian regulator issuing SOBIT certificates. | Pro GA target | | ||||||
| | **Gitea** | Self‑hosted Git service – mirrors GitHub repo. | OSS hosting | | | **Gitea** | Self‑hosted Git service – mirrors GitHub repo. | OSS hosting | | ||||||
| | **GOST TLS** | TLS cipher‑suites defined by Russian GOST R 34.10‑2012 / 34.11‑2012. | Provided by `OpenSslGost` or CryptoPro | | | **GOST TLS** | TLS cipher‑suites defined by Russian GOST R 34.10‑2012 / 34.11‑2012. | Provided by `OpenSslGost` or CryptoPro | | ||||||
|   | |||||||
| @@ -150,7 +150,7 @@ cosign verify ghcr.io/stellaops/backend@sha256:<DIGEST> \ | |||||||
| | Layer                | Cadence                                                  | Method                         | | | Layer                | Cadence                                                  | Method                         | | ||||||
| | -------------------- | -------------------------------------------------------- | ------------------------------ | | | -------------------- | -------------------------------------------------------- | ------------------------------ | | ||||||
| | Backend & CLI images | Monthly or CVE‑driven docker pull + docker compose up -d | | | Backend & CLI images | Monthly or CVE‑driven docker pull + docker compose up -d | | ||||||
| | Trivy DB             | 24 h cron via FeedMerge Service                                 | configurable (FeedMerge.Cron) | | | Trivy DB             | 24 h scheduler via Feedser (vulnerability ingest/merge/export service) | configurable via Feedser scheduler options | | ||||||
| | Docker Engine        | vendor LTS                                               | distro package manager         | | | Docker Engine        | vendor LTS                                               | distro package manager         | | ||||||
| | Host OS              | security repos enabled                                   | unattended‑upgrades            | | | Host OS              | security repos enabled                                   | unattended‑upgrades            | | ||||||
|  |  | ||||||
|   | |||||||
| @@ -16,7 +16,7 @@ contributors who need to extend coverage or diagnose failures. | |||||||
| | **1. Unit** | `xUnit` (<code>dotnet test</code>) | `*.Tests.csproj` | per PR / push | | | **1. Unit** | `xUnit` (<code>dotnet test</code>) | `*.Tests.csproj` | per PR / push | | ||||||
| | **2. Property‑based** | `FsCheck` | `SbomPropertyTests` | per PR | | | **2. Property‑based** | `FsCheck` | `SbomPropertyTests` | per PR | | ||||||
| | **3. Integration (API)** | `Testcontainers` suite | `test/Api.Integration` | per PR + nightly | | | **3. Integration (API)** | `Testcontainers` suite | `test/Api.Integration` | per PR + nightly | | ||||||
| | **4. Integration (DB‑merge)** | in‑memory Mongo + Redis | `FeedMerge.Integration` | per PR | | | **4. Integration (DB-merge)** | in-memory Mongo + Redis | `Feedser.Integration` (vulnerability ingest/merge/export service) | per PR | | ||||||
| | **5. Contract (gRPC)** | `Buf breaking` | `buf.yaml` files | per PR | | | **5. Contract (gRPC)** | `Buf breaking` | `buf.yaml` files | per PR | | ||||||
| | **6. Front‑end unit** | `Jest` | `ui/src/**/*.spec.ts` | per PR | | | **6. Front‑end unit** | `Jest` | `ui/src/**/*.spec.ts` | per PR | | ||||||
| | **7. Front‑end E2E** | `Playwright` | `ui/e2e/**` | nightly | | | **7. Front‑end E2E** | `Playwright` | `ui/e2e/**` | nightly | | ||||||
| @@ -70,7 +70,7 @@ flowchart LR | |||||||
|   I1 --> FE[Jest] |   I1 --> FE[Jest] | ||||||
|   FE --> E2E[Playwright] |   FE --> E2E[Playwright] | ||||||
|   E2E --> Lighthouse |   E2E --> Lighthouse | ||||||
|   Lighthouse --> INTEG2[FeedMerge] |   Lighthouse --> INTEG2[Feedser] | ||||||
|   INTEG2 --> LOAD[k6] |   INTEG2 --> LOAD[k6] | ||||||
|   LOAD --> CHAOS[pumba] |   LOAD --> CHAOS[pumba] | ||||||
|   CHAOS --> RELEASE[Attestation diff] |   CHAOS --> RELEASE[Attestation diff] | ||||||
|   | |||||||
| @@ -32,7 +32,7 @@ why the system leans *monolith‑plus‑plug‑ins*, and where extension points | |||||||
| graph TD | graph TD | ||||||
|     A(API Gateway) |     A(API Gateway) | ||||||
|     B1(Scanner Core<br/>.NET latest LTS) |     B1(Scanner Core<br/>.NET latest LTS) | ||||||
|     B2(FeedMerge service) |     B2["Feedser service<br/>(vuln ingest/merge/export)"] | ||||||
|     B3(Policy Engine OPA) |     B3(Policy Engine OPA) | ||||||
|     C1(Redis 7) |     C1(Redis 7) | ||||||
|     C2(MongoDB 7) |     C2(MongoDB 7) | ||||||
| @@ -53,7 +53,7 @@ graph TD | |||||||
| | ---------------------------- | --------------------- | ---------------------------------------------------- | | | ---------------------------- | --------------------- | ---------------------------------------------------- | | ||||||
| | **API Gateway**              | ASP.NET Minimal API   | Auth (JWT), quotas, request routing                  | | | **API Gateway**              | ASP.NET Minimal API   | Auth (JWT), quotas, request routing                  | | ||||||
| | **Scanner Core**             | C# 12, Polly          | Layer diffing, SBOM generation, vuln correlation     | | | **Scanner Core**             | C# 12, Polly          | Layer diffing, SBOM generation, vuln correlation     | | ||||||
| | **FeedMerge**                | C# source‑gen workers | Consolidate NVD + regional CVE feeds into one SQLite | | | **Feedser (vulnerability ingest/merge/export service)** | C# source-gen workers | Consolidate NVD + regional CVE feeds into the canonical MongoDB store and drive JSON / Trivy DB exports | | ||||||
| | **Policy Engine**            | OPA (Rego)            | admission decisions, custom org rules                | | | **Policy Engine**            | OPA (Rego)            | admission decisions, custom org rules                | | ||||||
| | **Redis 7**                  | Key‑DB compatible     | LRU cache, quota counters                            | | | **Redis 7**                  | Key‑DB compatible     | LRU cache, quota counters                            | | ||||||
| | **MongoDB 7**                | WiredTiger            | SBOM & findings storage                              | | | **MongoDB 7**                | WiredTiger            | SBOM & findings storage                              | | ||||||
| @@ -121,7 +121,7 @@ Hot‑plugging is deferred until after v 1.0 for security review. | |||||||
| Although the default deployment is a single container, each sub‑service can be | Although the default deployment is a single container, each sub‑service can be | ||||||
| extracted: | extracted: | ||||||
|  |  | ||||||
| * FeedMerge → standalone cron pod. | * Feedser → standalone cron pod. | ||||||
| * Policy Engine → side‑car (OPA) with gRPC contract. | * Policy Engine → side‑car (OPA) with gRPC contract. | ||||||
| * ResultSink → queue worker (RabbitMQ or Azure Service Bus). | * ResultSink → queue worker (RabbitMQ or Azure Service Bus). | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,21 +1,20 @@ | |||||||
| @@ -1,191 +0,0 @@ |  | ||||||
| # ARCHITECTURE.md — **StellaOps.Feedser** | # ARCHITECTURE.md — **StellaOps.Feedser** | ||||||
| 
 | 
 | ||||||
| > **Goal**: Build a sovereign-ready, self-hostable **feed-merge service** that ingests authoritative vulnerability sources, normalizes and de-duplicates them into **MongoDB**, and exports **JSON** and **Trivy-compatible DB** artifacts. | > **Goal**: Build a sovereign-ready, self-hostable **feed-merge service** that ingests authoritative vulnerability sources, normalizes and de-duplicates them into **MongoDB**, and exports **JSON** and **Trivy-compatible DB** artifacts. | ||||||
| > **Form factor**: Long-running **Web Service** with **REST APIs** (health, status, control) and an embedded **internal cron scheduler**. | > **Form factor**: Long-running **Web Service** with **REST APIs** (health, status, control) and an embedded **internal cron scheduler**. Controllable by StellaOps.Cli (# stella db ...) | ||||||
| > **No signing inside Feedser** (signing is a separate pipeline step). | > **No signing inside Feedser** (signing is a separate pipeline step). | ||||||
| > **Runtime SDK baseline**: .NET 10 Preview 7 (SDK 10.0.100-preview.7.25380.108) targeting `net10.0`, aligned with the deployed api.stella-ops.org service. | > **Runtime SDK baseline**: .NET 10 Preview 7 (SDK 10.0.100-preview.7.25380.108) targeting `net10.0`, aligned with the deployed api.stella-ops.org service. | ||||||
| > **Three explicit stages**: | > **Four explicit stages**: | ||||||
| > | > | ||||||
| > 1. **Source Download** → raw documents. | > 1. **Source Download** → raw documents. | ||||||
| > 2. **Merge + Dedupe + Normalization** → MongoDB canonical. | > 2. **Parse & Normalize** → schema-validated DTOs enriched with canonical identifiers. | ||||||
| > 3. **Export** → JSON or TrivyDB (full or delta), then (externally) sign/publish. | > 3. **Merge & Deduplicate** → precedence-aware canonical records persisted to MongoDB. | ||||||
|  | > 4. **Export** → JSON or TrivyDB (full or delta), then (externally) sign/publish. | ||||||
| 
 | 
 | ||||||
| --- | --- | ||||||
| 
 | 
 | ||||||
| ## 1) Naming & Solution Layout | ## 1) Naming & Solution Layout | ||||||
| 
 | 
 | ||||||
| **Solution root**: `StellaOps.Feedser` |  | ||||||
| **Source connectors** namespace prefix: `StellaOps.Feedser.Source.*` | **Source connectors** namespace prefix: `StellaOps.Feedser.Source.*` | ||||||
| **Exporters**: | **Exporters**: | ||||||
| 
 | 
 | ||||||
scripts/render_docs.py · new file · 254 lines
							| @@ -0,0 +1,254 @@ | |||||||
|  | #!/usr/bin/env python3 | ||||||
|  | """Render Markdown documentation under docs/ into a static HTML bundle. | ||||||
|  |  | ||||||
|  | The script converts every Markdown file into a standalone HTML document, | ||||||
|  | mirroring the original folder structure under the output directory. A | ||||||
|  | `manifest.json` file is also produced to list the generated documents and | ||||||
|  | surface basic metadata (title, source path, output path). | ||||||
|  |  | ||||||
|  | Usage: | ||||||
|  |     python scripts/render_docs.py --source docs --output build/docs-site | ||||||
|  |  | ||||||
|  | Dependencies: | ||||||
|  |     pip install markdown pygments | ||||||
|  | """ | ||||||
|  |  | ||||||
|  | from __future__ import annotations | ||||||
|  |  | ||||||
|  | import argparse | ||||||
|  | import json | ||||||
|  | import logging | ||||||
|  | import os | ||||||
|  | import shutil | ||||||
|  | from dataclasses import dataclass | ||||||
|  | from datetime import datetime, timezone | ||||||
|  | from pathlib import Path | ||||||
|  | from typing import Iterable, List | ||||||
|  |  | ||||||
|  | import markdown | ||||||
|  |  | ||||||
|  | # Enable fenced code blocks, tables, and definition lists. These cover the | ||||||
|  | # Markdown constructs heavily used across the documentation set. | ||||||
|  | MD_EXTENSIONS = [ | ||||||
|  |     "fenced_code", | ||||||
|  |     "codehilite", | ||||||
|  |     "tables", | ||||||
|  |     "toc", | ||||||
|  |     "def_list", | ||||||
|  |     "admonition", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  | HTML_TEMPLATE = """<!DOCTYPE html> | ||||||
|  | <html lang=\"en\"> | ||||||
|  | <head> | ||||||
|  |   <meta charset=\"utf-8\" /> | ||||||
|  |   <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" /> | ||||||
|  |   <title>{title}</title> | ||||||
|  |   <style> | ||||||
|  |     :root {{ | ||||||
|  |       color-scheme: light dark; | ||||||
|  |       font-family: system-ui, -apple-system, Segoe UI, sans-serif; | ||||||
|  |       line-height: 1.6; | ||||||
|  |     }} | ||||||
|  |     body {{ | ||||||
|  |       margin: 2.5rem auto; | ||||||
|  |       padding: 0 1.5rem; | ||||||
|  |       max-width: 70ch; | ||||||
|  |       background: var(--background, #1118270d); | ||||||
|  |     }} | ||||||
|  |     pre {{ | ||||||
|  |       overflow: auto; | ||||||
|  |       padding: 1rem; | ||||||
|  |       background: #11182714; | ||||||
|  |       border-radius: 0.5rem; | ||||||
|  |     }} | ||||||
|  |     code {{ | ||||||
|  |       font-family: SFMono-Regular, Consolas, 'Liberation Mono', monospace; | ||||||
|  |       font-size: 0.95em; | ||||||
|  |     }} | ||||||
|  |     table {{ | ||||||
|  |       width: 100%; | ||||||
|  |       border-collapse: collapse; | ||||||
|  |       margin: 1rem 0; | ||||||
|  |     }} | ||||||
|  |     th, td {{ | ||||||
|  |       border: 1px solid #4b5563; | ||||||
|  |       padding: 0.5rem; | ||||||
|  |       text-align: left; | ||||||
|  |     }} | ||||||
|  |     a {{ | ||||||
|  |       color: #2563eb; | ||||||
|  |     }} | ||||||
|  |     footer {{ | ||||||
|  |       margin-top: 3rem; | ||||||
|  |       font-size: 0.85rem; | ||||||
|  |       color: #6b7280; | ||||||
|  |     }} | ||||||
|  |   </style> | ||||||
|  | </head> | ||||||
|  | <body> | ||||||
|  |   <main> | ||||||
|  | {body} | ||||||
|  |   </main> | ||||||
|  |   <footer> | ||||||
|  |     <p>Generated on {generated_at} UTC · Source: {source}</p> | ||||||
|  |   </footer> | ||||||
|  | </body> | ||||||
|  | </html> | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @dataclass | ||||||
|  | class DocEntry: | ||||||
|  |     source: Path | ||||||
|  |     output: Path | ||||||
|  |     title: str | ||||||
|  |  | ||||||
|  |     def to_manifest(self) -> dict[str, str]: | ||||||
|  |         return { | ||||||
|  |             "source": self.source.as_posix(), | ||||||
|  |             "output": self.output.as_posix(), | ||||||
|  |             "title": self.title, | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def discover_markdown_files(source_root: Path) -> Iterable[Path]: | ||||||
|  |     for path in source_root.rglob("*.md"): | ||||||
|  |         if path.is_file(): | ||||||
|  |             yield path | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def read_title(markdown_text: str, fallback: str) -> str: | ||||||
|  |     for raw_line in markdown_text.splitlines(): | ||||||
|  |         line = raw_line.strip() | ||||||
|  |         if line.startswith("#"): | ||||||
|  |             return line.lstrip("#").strip() or fallback | ||||||
|  |     return fallback | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def convert_markdown(path: Path, source_root: Path, output_root: Path) -> DocEntry: | ||||||
|  |     relative = path.relative_to(source_root) | ||||||
|  |     output_path = output_root / relative.with_suffix(".html") | ||||||
|  |     output_path.parent.mkdir(parents=True, exist_ok=True) | ||||||
|  |  | ||||||
|  |     text = path.read_text(encoding="utf-8") | ||||||
|  |     html_body = markdown.markdown(text, extensions=MD_EXTENSIONS) | ||||||
|  |  | ||||||
|  |     title = read_title(text, fallback=relative.stem.replace("_", " ")) | ||||||
|  |     generated_at = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S") | ||||||
|  |  | ||||||
|  |     output_path.write_text( | ||||||
|  |         HTML_TEMPLATE.format( | ||||||
|  |             title=title, | ||||||
|  |             body=html_body, | ||||||
|  |             generated_at=generated_at, | ||||||
|  |             source=relative.as_posix(), | ||||||
|  |         ), | ||||||
|  |         encoding="utf-8", | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     return DocEntry(source=relative, output=output_path.relative_to(output_root), title=title) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def copy_static_assets(source_root: Path, output_root: Path) -> None: | ||||||
|  |     for path in source_root.rglob("*"): | ||||||
|  |         if path.is_dir() or path.suffix.lower() == ".md": | ||||||
|  |             # Skip Markdown (already rendered separately). | ||||||
|  |             continue | ||||||
|  |         relative = path.relative_to(source_root) | ||||||
|  |         destination = output_root / relative | ||||||
|  |         destination.parent.mkdir(parents=True, exist_ok=True) | ||||||
|  |         destination.write_bytes(path.read_bytes()) | ||||||
|  |         logging.info("Copied asset %s", relative) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def write_manifest(entries: Iterable[DocEntry], output_root: Path) -> None: | ||||||
|  |     manifest_path = output_root / "manifest.json" | ||||||
|  |     manifest = [entry.to_manifest() for entry in entries] | ||||||
|  |     manifest_path.write_text(json.dumps(manifest, indent=2), encoding="utf-8") | ||||||
|  |     logging.info("Wrote manifest with %d entries", len(manifest)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def write_index(entries: List[DocEntry], output_root: Path) -> None: | ||||||
|  |     index_path = output_root / "index.html" | ||||||
|  |     generated_at = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S") | ||||||
|  |  | ||||||
|  |     items = "\n".join( | ||||||
|  |         f"      <li><a href='{entry.output.as_posix()}'>{entry.title}</a>" f" · <code>{entry.source.as_posix()}</code></li>" | ||||||
|  |         for entry in sorted(entries, key=lambda e: e.title.lower()) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     html = f"""<!DOCTYPE html> | ||||||
|  | <html lang=\"en\"> | ||||||
|  | <head> | ||||||
|  |   <meta charset=\"utf-8\" /> | ||||||
|  |   <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" /> | ||||||
|  |   <title>Stella Ops Documentation Index</title> | ||||||
|  |   <style> | ||||||
|  |     body {{ | ||||||
|  |       margin: 2.5rem auto; | ||||||
|  |       padding: 0 1.5rem; | ||||||
|  |       max-width: 70ch; | ||||||
|  |       font-family: system-ui, -apple-system, 'Segoe UI', sans-serif; | ||||||
|  |       line-height: 1.6; | ||||||
|  |     }} | ||||||
|  |     h1 {{ font-size: 2.25rem; margin-bottom: 1rem; }} | ||||||
|  |     ul {{ list-style: none; padding: 0; }} | ||||||
|  |     li {{ margin-bottom: 0.75rem; }} | ||||||
|  |     code {{ background: #11182714; padding: 0.2rem 0.35rem; border-radius: 0.35rem; }} | ||||||
|  |   </style> | ||||||
|  | </head> | ||||||
|  | <body> | ||||||
|  |   <h1>Stella Ops Documentation</h1> | ||||||
|  |   <p>Generated on {generated_at} UTC</p> | ||||||
|  |   <ul> | ||||||
|  | {items} | ||||||
|  |   </ul> | ||||||
|  | </body> | ||||||
|  | </html> | ||||||
|  | """ | ||||||
|  |     index_path.write_text(html, encoding="utf-8") | ||||||
|  |     logging.info("Wrote HTML index with %d entries", len(entries)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def parse_args() -> argparse.Namespace: | ||||||
|  |     parser = argparse.ArgumentParser(description="Render documentation bundle") | ||||||
|  |     parser.add_argument("--source", default="docs", type=Path, help="Directory containing Markdown sources") | ||||||
|  |     parser.add_argument("--output", default=Path("build/docs-site"), type=Path, help="Directory for rendered output") | ||||||
|  |     parser.add_argument("--clean", action="store_true", help="Remove the output directory before rendering") | ||||||
|  |     return parser.parse_args() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def main() -> int: | ||||||
|  |     logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s") | ||||||
|  |     args = parse_args() | ||||||
|  |  | ||||||
|  |     source_root: Path = args.source.resolve() | ||||||
|  |     output_root: Path = args.output.resolve() | ||||||
|  |  | ||||||
|  |     if not source_root.exists(): | ||||||
|  |         logging.error("Source directory %s does not exist", source_root) | ||||||
|  |         return os.EX_NOINPUT | ||||||
|  |  | ||||||
|  |     if args.clean and output_root.exists(): | ||||||
|  |         logging.info("Cleaning existing output directory %s", output_root) | ||||||
|  |         shutil.rmtree(output_root) | ||||||
|  |  | ||||||
|  |     output_root.mkdir(parents=True, exist_ok=True) | ||||||
|  |  | ||||||
|  |     entries: List[DocEntry] = [] | ||||||
|  |     for md_file in discover_markdown_files(source_root): | ||||||
|  |         entry = convert_markdown(md_file, source_root, output_root) | ||||||
|  |         entries.append(entry) | ||||||
|  |         logging.info("Rendered %s -> %s", entry.source, entry.output) | ||||||
|  |  | ||||||
|  |     write_manifest(entries, output_root) | ||||||
|  |     write_index(entries, output_root) | ||||||
|  |     copy_static_assets(source_root, output_root) | ||||||
|  |  | ||||||
|  |     logging.info("Documentation bundle available at %s", output_root) | ||||||
|  |     return os.EX_OK | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     raise SystemExit(main()) | ||||||
| @@ -6,7 +6,7 @@ | |||||||
|   </PropertyGroup> |   </PropertyGroup> | ||||||
| 
 | 
 | ||||||
|   <ItemGroup> |   <ItemGroup> | ||||||
|     <ProjectReference Update="..\..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj"> |     <ProjectReference Update="../StellaOps.Plugin/StellaOps.Plugin.csproj"> | ||||||
|       <Private>false</Private> |       <Private>false</Private> | ||||||
|       <ExcludeAssets>runtime</ExcludeAssets> |       <ExcludeAssets>runtime</ExcludeAssets> | ||||||
|     </ProjectReference> |     </ProjectReference> | ||||||
| @@ -0,0 +1,61 @@ | |||||||
|  | using System; | ||||||
|  | using System.Collections.Generic; | ||||||
|  | using System.IO; | ||||||
|  | using Microsoft.Extensions.Configuration; | ||||||
|  | using Microsoft.Extensions.DependencyInjection; | ||||||
|  | using Microsoft.Extensions.Options; | ||||||
|  | using StellaOps.Feedser.Core.Jobs; | ||||||
|  | using StellaOps.Plugin.Hosting; | ||||||
|  | using Xunit; | ||||||
|  |  | ||||||
|  | namespace StellaOps.Feedser.Core.Tests; | ||||||
|  |  | ||||||
|  | public sealed class JobPluginRegistrationExtensionsTests | ||||||
|  | { | ||||||
|  |     [Fact] | ||||||
|  |     public void RegisterJobPluginRoutines_LoadsPluginsAndRegistersDefinitions() | ||||||
|  |     { | ||||||
|  |         var services = new ServiceCollection(); | ||||||
|  |         services.AddJobScheduler(); | ||||||
|  |  | ||||||
|  |         var configuration = new ConfigurationBuilder() | ||||||
|  |             .AddInMemoryCollection(new Dictionary<string, string?> | ||||||
|  |             { | ||||||
|  |                 ["plugin:test:timeoutSeconds"] = "45", | ||||||
|  |             }) | ||||||
|  |             .Build(); | ||||||
|  |  | ||||||
|  |         var assemblyPath = typeof(JobPluginRegistrationExtensionsTests).Assembly.Location; | ||||||
|  |         var pluginDirectory = Path.GetDirectoryName(assemblyPath)!; | ||||||
|  |         var pluginFile = Path.GetFileName(assemblyPath); | ||||||
|  |  | ||||||
|  |         var options = new PluginHostOptions | ||||||
|  |         { | ||||||
|  |             BaseDirectory = pluginDirectory, | ||||||
|  |             PluginsDirectory = pluginDirectory, | ||||||
|  |             EnsureDirectoryExists = false, | ||||||
|  |             RecursiveSearch = false, | ||||||
|  |         }; | ||||||
|  |         options.SearchPatterns.Add(pluginFile); | ||||||
|  |  | ||||||
|  |         services.RegisterJobPluginRoutines(configuration, options); | ||||||
|  |  | ||||||
|  |         Assert.Contains( | ||||||
|  |             services, | ||||||
|  |             descriptor => descriptor.ServiceType == typeof(PluginHostResult)); | ||||||
|  |  | ||||||
|  |         Assert.Contains( | ||||||
|  |             services, | ||||||
|  |             descriptor => descriptor.ServiceType.FullName == typeof(PluginRoutineExecuted).FullName); | ||||||
|  |  | ||||||
|  |         using var provider = services.BuildServiceProvider(); | ||||||
|  |         var schedulerOptions = provider.GetRequiredService<IOptions<JobSchedulerOptions>>().Value; | ||||||
|  |  | ||||||
|  |         Assert.True(schedulerOptions.Definitions.TryGetValue(PluginJob.JobKind, out var definition)); | ||||||
|  |         Assert.NotNull(definition); | ||||||
|  |         Assert.Equal(PluginJob.JobKind, definition.Kind); | ||||||
|  |         Assert.Equal("StellaOps.Feedser.Core.Tests.PluginJob", definition.JobType.FullName); | ||||||
|  |         Assert.Equal(TimeSpan.FromSeconds(45), definition.Timeout); | ||||||
|  |         Assert.Equal(TimeSpan.FromSeconds(5), definition.LeaseDuration); | ||||||
|  |         Assert.Equal("*/10 * * * *", definition.CronExpression); | ||||||
|  |     } | ||||||
|  | } | ||||||
src/StellaOps.Feedser.Core.Tests/JobSchedulerBuilderTests.cs · new file · 70 lines
							| @@ -0,0 +1,70 @@ | |||||||
|  | using System; | ||||||
|  | using System.Threading; | ||||||
|  | using System.Threading.Tasks; | ||||||
|  | using Microsoft.Extensions.DependencyInjection; | ||||||
|  | using Microsoft.Extensions.Options; | ||||||
|  | using StellaOps.Feedser.Core.Jobs; | ||||||
|  | using Xunit; | ||||||
|  |  | ||||||
|  | namespace StellaOps.Feedser.Core.Tests; | ||||||
|  |  | ||||||
|  | public sealed class JobSchedulerBuilderTests | ||||||
|  | { | ||||||
|  |     [Fact] | ||||||
|  |     public void AddJob_RegistersDefinitionWithExplicitMetadata() | ||||||
|  |     { | ||||||
|  |         var services = new ServiceCollection(); | ||||||
|  |         var builder = services.AddJobScheduler(); | ||||||
|  |  | ||||||
|  |         builder.AddJob<TestJob>( | ||||||
|  |             kind: "jobs:test", | ||||||
|  |             cronExpression: "*/5 * * * *", | ||||||
|  |             timeout: TimeSpan.FromMinutes(42), | ||||||
|  |             leaseDuration: TimeSpan.FromMinutes(7), | ||||||
|  |             enabled: false); | ||||||
|  |  | ||||||
|  |         using var provider = services.BuildServiceProvider(); | ||||||
|  |         var options = provider.GetRequiredService<IOptions<JobSchedulerOptions>>().Value; | ||||||
|  |  | ||||||
|  |         Assert.True(options.Definitions.TryGetValue("jobs:test", out var definition)); | ||||||
|  |         Assert.NotNull(definition); | ||||||
|  |         Assert.Equal(typeof(TestJob), definition.JobType); | ||||||
|  |         Assert.Equal(TimeSpan.FromMinutes(42), definition.Timeout); | ||||||
|  |         Assert.Equal(TimeSpan.FromMinutes(7), definition.LeaseDuration); | ||||||
|  |         Assert.Equal("*/5 * * * *", definition.CronExpression); | ||||||
|  |         Assert.False(definition.Enabled); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     [Fact] | ||||||
|  |     public void AddJob_UsesDefaults_WhenOptionalMetadataExcluded() | ||||||
|  |     { | ||||||
|  |         var services = new ServiceCollection(); | ||||||
|  |         var builder = services.AddJobScheduler(options => | ||||||
|  |         { | ||||||
|  |             options.DefaultTimeout = TimeSpan.FromSeconds(123); | ||||||
|  |             options.DefaultLeaseDuration = TimeSpan.FromSeconds(45); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         builder.AddJob<DefaultedJob>(kind: "jobs:defaults"); | ||||||
|  |  | ||||||
|  |         using var provider = services.BuildServiceProvider(); | ||||||
|  |         var options = provider.GetRequiredService<IOptions<JobSchedulerOptions>>().Value; | ||||||
|  |  | ||||||
|  |         Assert.True(options.Definitions.TryGetValue("jobs:defaults", out var definition)); | ||||||
|  |         Assert.NotNull(definition); | ||||||
|  |         Assert.Equal(typeof(DefaultedJob), definition.JobType); | ||||||
|  |         Assert.Equal(TimeSpan.FromSeconds(123), definition.Timeout); | ||||||
|  |         Assert.Equal(TimeSpan.FromSeconds(45), definition.LeaseDuration); | ||||||
|  |         Assert.Null(definition.CronExpression); | ||||||
|  |         Assert.True(definition.Enabled); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class TestJob : IJob | ||||||
|  |     { | ||||||
|  |         public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) | ||||||
|  |             => Task.CompletedTask; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class DefaultedJob : IJob | ||||||
|  |     { | ||||||
|  |         public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) | ||||||
|  |             => Task.CompletedTask; | ||||||
|  |     } | ||||||
|  | } | ||||||
src/StellaOps.Feedser.Core.Tests/PluginRoutineFixtures.cs · new file · 42 lines
							| @@ -0,0 +1,42 @@ | |||||||
|  | using System; | ||||||
|  | using System.Threading; | ||||||
|  | using System.Threading.Tasks; | ||||||
|  | using Microsoft.Extensions.Configuration; | ||||||
|  | using Microsoft.Extensions.DependencyInjection; | ||||||
|  | using StellaOps.DependencyInjection; | ||||||
|  | using StellaOps.Feedser.Core.Jobs; | ||||||
|  |  | ||||||
|  | namespace StellaOps.Feedser.Core.Tests; | ||||||
|  |  | ||||||
|  | public sealed class TestPluginRoutine : IDependencyInjectionRoutine | ||||||
|  | { | ||||||
|  |     public IServiceCollection Register(IServiceCollection services, IConfiguration configuration) | ||||||
|  |     { | ||||||
|  |         ArgumentNullException.ThrowIfNull(services); | ||||||
|  |         ArgumentNullException.ThrowIfNull(configuration); | ||||||
|  |  | ||||||
|  |         var builder = new JobSchedulerBuilder(services); | ||||||
|  |         var timeoutSeconds = configuration.GetValue<int?>("plugin:test:timeoutSeconds") ?? 30; | ||||||
|  |  | ||||||
|  |         builder.AddJob<PluginJob>( | ||||||
|  |             PluginJob.JobKind, | ||||||
|  |             cronExpression: "*/10 * * * *", | ||||||
|  |             timeout: TimeSpan.FromSeconds(timeoutSeconds), | ||||||
|  |             leaseDuration: TimeSpan.FromSeconds(5)); | ||||||
|  |  | ||||||
|  |         services.AddSingleton<PluginRoutineExecuted>(); | ||||||
|  |         return services; | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | public sealed class PluginRoutineExecuted | ||||||
|  | { | ||||||
|  | } | ||||||
|  |  | ||||||
|  | public sealed class PluginJob : IJob | ||||||
|  | { | ||||||
|  |     public const string JobKind = "plugin:test"; | ||||||
|  |  | ||||||
|  |     public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) | ||||||
|  |         => Task.CompletedTask; | ||||||
|  | } | ||||||
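A quick orientation sketch (editorial, not part of the commit): the fixture's routine reads plugin:test:timeoutSeconds from whatever IConfiguration the host supplies, so an in-memory configuration is enough to exercise it end to end.

    using System.Collections.Generic;
    using Microsoft.Extensions.Configuration;
    using Microsoft.Extensions.DependencyInjection;

    var configuration = new ConfigurationBuilder()
        .AddInMemoryCollection(new Dictionary<string, string?>
        {
            ["plugin:test:timeoutSeconds"] = "90",
        })
        .Build();

    var services = new ServiceCollection();
    new TestPluginRoutine().Register(services, configuration);
    // "plugin:test" is now registered with a 90-second timeout and the */10 cron above.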
| @@ -0,0 +1,128 @@ | |||||||
|  | using System; | ||||||
|  | using System.Collections.Generic; | ||||||
|  | using System.Linq; | ||||||
|  | using System.Reflection; | ||||||
|  | using Microsoft.Extensions.Configuration; | ||||||
|  | using Microsoft.Extensions.DependencyInjection; | ||||||
|  | using Microsoft.Extensions.Logging; | ||||||
|  | using StellaOps.DependencyInjection; | ||||||
|  | using StellaOps.Plugin.Hosting; | ||||||
|  |  | ||||||
|  | namespace StellaOps.Feedser.Core.Jobs; | ||||||
|  |  | ||||||
|  | public static class JobPluginRegistrationExtensions | ||||||
|  | { | ||||||
|  |     public static IServiceCollection RegisterJobPluginRoutines( | ||||||
|  |         this IServiceCollection services, | ||||||
|  |         IConfiguration configuration, | ||||||
|  |         PluginHostOptions options, | ||||||
|  |         ILogger? logger = null) | ||||||
|  |     { | ||||||
|  |         ArgumentNullException.ThrowIfNull(services); | ||||||
|  |         ArgumentNullException.ThrowIfNull(configuration); | ||||||
|  |         ArgumentNullException.ThrowIfNull(options); | ||||||
|  |  | ||||||
|  |         var loadResult = PluginHost.LoadPlugins(options, logger); | ||||||
|  |  | ||||||
|  |         if (!services.Any(sd => sd.ServiceType == typeof(PluginHostResult))) | ||||||
|  |         { | ||||||
|  |             services.AddSingleton(loadResult); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         var currentServices = services; | ||||||
|  |         var seenRoutineTypes = new HashSet<string>(StringComparer.Ordinal); | ||||||
|  |  | ||||||
|  |         foreach (var plugin in loadResult.Plugins) | ||||||
|  |         { | ||||||
|  |             foreach (var routineType in GetRoutineTypes(plugin.Assembly)) | ||||||
|  |             { | ||||||
|  |                 if (!typeof(IDependencyInjectionRoutine).IsAssignableFrom(routineType)) | ||||||
|  |                 { | ||||||
|  |                     continue; | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 if (routineType.IsInterface || routineType.IsAbstract) | ||||||
|  |                 { | ||||||
|  |                     continue; | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 var routineKey = routineType.FullName ?? routineType.Name; | ||||||
|  |                 if (!seenRoutineTypes.Add(routineKey)) | ||||||
|  |                 { | ||||||
|  |                     continue; | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 IDependencyInjectionRoutine? routineInstance; | ||||||
|  |                 try | ||||||
|  |                 { | ||||||
|  |                     routineInstance = Activator.CreateInstance(routineType) as IDependencyInjectionRoutine; | ||||||
|  |                 } | ||||||
|  |                 catch (Exception ex) | ||||||
|  |                 { | ||||||
|  |                     logger?.LogWarning( | ||||||
|  |                         ex, | ||||||
|  |                         "Failed to create dependency injection routine {Routine} from plugin {Plugin}.", | ||||||
|  |                         routineType.FullName ?? routineType.Name, | ||||||
|  |                         plugin.Assembly.FullName ?? plugin.AssemblyPath); | ||||||
|  |                     continue; | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 if (routineInstance is null) | ||||||
|  |                 { | ||||||
|  |                     continue; | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 try | ||||||
|  |                 { | ||||||
|  |                     var updated = routineInstance.Register(currentServices, configuration); | ||||||
|  |                     if (updated is not null && !ReferenceEquals(updated, currentServices)) | ||||||
|  |                     { | ||||||
|  |                         currentServices = updated; | ||||||
|  |                     } | ||||||
|  |                 } | ||||||
|  |                 catch (Exception ex) | ||||||
|  |                 { | ||||||
|  |                     logger?.LogError( | ||||||
|  |                         ex, | ||||||
|  |                         "Dependency injection routine {Routine} from plugin {Plugin} threw during registration.", | ||||||
|  |                         routineType.FullName ?? routineType.Name, | ||||||
|  |                         plugin.Assembly.FullName ?? plugin.AssemblyPath); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         if (loadResult.MissingOrderedPlugins.Count > 0) | ||||||
|  |         { | ||||||
|  |             logger?.LogWarning( | ||||||
|  |                 "Missing ordered plugin(s): {Missing}", | ||||||
|  |                 string.Join(", ", loadResult.MissingOrderedPlugins)); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return currentServices; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private static IEnumerable<Type> GetRoutineTypes(Assembly assembly) | ||||||
|  |     { | ||||||
|  |         if (assembly is null) | ||||||
|  |         { | ||||||
|  |             yield break; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         Type[] types; | ||||||
|  |         try | ||||||
|  |         { | ||||||
|  |             types = assembly.GetTypes(); | ||||||
|  |         } | ||||||
|  |         catch (ReflectionTypeLoadException ex) | ||||||
|  |         { | ||||||
|  |             types = ex.Types.Where(static t => t is not null)! | ||||||
|  |                 .Select(static t => t!) | ||||||
|  |                 .ToArray(); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         foreach (var type in types) | ||||||
|  |         { | ||||||
|  |             yield return type; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
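A host-side usage sketch for the extension above (editorial; how PluginHostOptions is constructed and which plugin paths it needs are assumptions, not shown in this diff). One subtlety the code encodes: a routine may return a replacement collection, so callers should keep using the method's return value rather than the original reference.

    using Microsoft.Extensions.Configuration;
    using Microsoft.Extensions.DependencyInjection;
    using StellaOps.Feedser.Core.Jobs;
    using StellaOps.Plugin.Hosting;

    var services = new ServiceCollection();
    var configuration = new ConfigurationBuilder().Build();
    var pluginOptions = new PluginHostOptions(); // assumed default-constructible; point it at your plugin directory
    var effective = services.RegisterJobPluginRoutines(configuration, pluginOptions, logger: null);
    using var provider = effective.BuildServiceProvider();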
| @@ -14,5 +14,6 @@ | |||||||
|   </ItemGroup> |   </ItemGroup> | ||||||
|   <ItemGroup> |   <ItemGroup> | ||||||
|     <ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" /> |     <ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" /> | ||||||
|  |     <ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" /> | ||||||
|   </ItemGroup> |   </ItemGroup> | ||||||
| </Project> | </Project> | ||||||
| @@ -8,7 +8,7 @@ | |||||||
| |Run telemetry enrichment|BE-Core|Observability|DONE – `JobDiagnostics` ties activities & counters into coordinator/scheduler paths.| | |Run telemetry enrichment|BE-Core|Observability|DONE – `JobDiagnostics` ties activities & counters into coordinator/scheduler paths.| | ||||||
| |Deterministic params hashing|BE-Core|Core|DONE – `JobParametersHasher` creates SHA256 hash.| | |Deterministic params hashing|BE-Core|Core|DONE – `JobParametersHasher` creates SHA256 hash.| | ||||||
| |Golden tests for timeout/cancel|QA|Core|DONE – JobCoordinatorTests cover cancellation timeout path.| | |Golden tests for timeout/cancel|QA|Core|DONE – JobCoordinatorTests cover cancellation timeout path.| | ||||||
| |JobSchedulerBuilder options registry coverage|BE-Core|Core|TODO – verify cron/timeout/lease metadata persists for scheduler surfaces.| | |JobSchedulerBuilder options registry coverage|BE-Core|Core|DONE – added scheduler tests confirming cron/timeout/lease metadata persists via JobSchedulerOptions.| | ||||||
| |Plugin discovery + DI glue with PluginHost|BE-Core|Plugin libs|TODO – auto-register job routines for connectors/exporters.| | |Plugin discovery + DI glue with PluginHost|BE-Core|Plugin libs|DONE – JobPluginRegistrationExtensions now loads PluginHost routines and wires connector/exporter registrations.| | ||||||
| |Harden lease release error handling in JobCoordinator|BE-Core|Storage.Mongo|DONE – lease release failures now logged, wrapped, and drive run failure status; fire-and-forget execution guarded. Verified with `dotnet test --no-build --filter JobCoordinator`.| | |Harden lease release error handling in JobCoordinator|BE-Core|Storage.Mongo|DONE – lease release failures now logged, wrapped, and drive run failure status; fire-and-forget execution guarded. Verified with `dotnet test --no-build --filter JobCoordinator`.| | ||||||
| |Validate job trigger parameters for serialization|BE-Core|WebService|DONE – trigger parameters normalized/serialized with defensive checks returning InvalidParameters on failure. Full-suite `dotnet test --no-build` currently red from live connector fixture drift (Oracle/JVN/RedHat).| | |Validate job trigger parameters for serialization|BE-Core|WebService|DONE – trigger parameters normalized/serialized with defensive checks returning InvalidParameters on failure. Full-suite `dotnet test --no-build` currently red from live connector fixture drift (Oracle/JVN/RedHat).| | ||||||
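The "Deterministic params hashing" row above (JobParametersHasher, SHA-256) describes a canonicalize-then-hash scheme. A hedged sketch of the general technique follows; the helper name and signature are hypothetical, and the real JobParametersHasher is not shown in this diff.

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Security.Cryptography;
    using System.Text;
    using System.Text.Json;

    static string HashParameters(IReadOnlyDictionary<string, object?> parameters)
    {
        // Sort keys ordinally so logically-equal parameter sets serialize, and hash, identically.
        var canonical = JsonSerializer.Serialize(
            new SortedDictionary<string, object?>(
                parameters.ToDictionary(kv => kv.Key, kv => kv.Value),
                StringComparer.Ordinal));
        return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(canonical))).ToLowerInvariant();
    }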
| @@ -11,8 +11,8 @@ | |||||||
|     <ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" /> |     <ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" /> | ||||||
|     <ProjectReference Include="..\StellaOps.Feedser.Normalization\StellaOps.Feedser.Normalization.csproj" /> |     <ProjectReference Include="..\StellaOps.Feedser.Normalization\StellaOps.Feedser.Normalization.csproj" /> | ||||||
|     <ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" /> |     <ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" /> | ||||||
|     <ProjectReference Include="..\..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj" /> |     <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> | ||||||
|     <ProjectReference Include="..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> |     <ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> | ||||||
|   </ItemGroup> |   </ItemGroup> | ||||||
|   <ItemGroup> |   <ItemGroup> | ||||||
|     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" /> |     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" /> | ||||||
| @@ -0,0 +1,589 @@ | |||||||
|  | using System; | ||||||
|  | using System.IO; | ||||||
|  | using System.Collections.Generic; | ||||||
|  | using System.Globalization; | ||||||
|  | using System.Linq; | ||||||
|  | using System.Runtime.CompilerServices; | ||||||
|  | using System.Security.Cryptography; | ||||||
|  | using System.Text; | ||||||
|  | using System.Text.Json; | ||||||
|  | using System.Threading; | ||||||
|  | using System.Threading.Tasks; | ||||||
|  | using Microsoft.Extensions.DependencyInjection; | ||||||
|  | using Microsoft.Extensions.Logging.Abstractions; | ||||||
|  | using Microsoft.Extensions.Options; | ||||||
|  | using StellaOps.Feedser.Exporter.Json; | ||||||
|  | using StellaOps.Feedser.Exporter.TrivyDb; | ||||||
|  | using StellaOps.Feedser.Models; | ||||||
|  | using StellaOps.Feedser.Storage.Mongo.Advisories; | ||||||
|  | using StellaOps.Feedser.Storage.Mongo.Exporting; | ||||||
|  |  | ||||||
|  | namespace StellaOps.Feedser.Exporter.TrivyDb.Tests; | ||||||
|  |  | ||||||
|  | public sealed class TrivyDbFeedExporterTests : IDisposable | ||||||
|  | { | ||||||
|  |     private readonly string _root; | ||||||
|  |     private readonly string _jsonRoot; | ||||||
|  |  | ||||||
|  |     public TrivyDbFeedExporterTests() | ||||||
|  |     { | ||||||
|  |         _root = Directory.CreateTempSubdirectory("feedser-trivy-exporter-tests").FullName; | ||||||
|  |         _jsonRoot = Path.Combine(_root, "tree"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     [Fact] | ||||||
|  |     public async Task ExportAsync_SortsAdvisoriesByKeyDeterministically() | ||||||
|  |     { | ||||||
|  |         var advisoryB = CreateSampleAdvisory("CVE-2024-1002", "Second advisory"); | ||||||
|  |         var advisoryA = CreateSampleAdvisory("CVE-2024-1001", "First advisory"); | ||||||
|  |  | ||||||
|  |         var advisoryStore = new StubAdvisoryStore(advisoryB, advisoryA); | ||||||
|  |  | ||||||
|  |         var optionsValue = new TrivyDbExportOptions | ||||||
|  |         { | ||||||
|  |             OutputRoot = _root, | ||||||
|  |             ReferencePrefix = "example/trivy", | ||||||
|  |             KeepWorkingTree = false, | ||||||
|  |             Json = new JsonExportOptions | ||||||
|  |             { | ||||||
|  |                 OutputRoot = _jsonRoot, | ||||||
|  |                 MaintainLatestSymlink = false, | ||||||
|  |             }, | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         var options = Options.Create(optionsValue); | ||||||
|  |         var packageBuilder = new TrivyDbPackageBuilder(); | ||||||
|  |         var ociWriter = new TrivyDbOciWriter(); | ||||||
|  |         var planner = new TrivyDbExportPlanner(); | ||||||
|  |         var stateStore = new InMemoryExportStateStore(); | ||||||
|  |         var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-09-20T00:00:00Z", CultureInfo.InvariantCulture)); | ||||||
|  |         var stateManager = new ExportStateManager(stateStore, timeProvider); | ||||||
|  |         var builderMetadata = JsonSerializer.SerializeToUtf8Bytes(new | ||||||
|  |         { | ||||||
|  |             Version = 2, | ||||||
|  |             NextUpdate = "2024-09-21T00:00:00Z", | ||||||
|  |             UpdatedAt = "2024-09-20T00:00:00Z", | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         var recordingBuilder = new RecordingTrivyDbBuilder(_root, builderMetadata); | ||||||
|  |         var orasPusher = new StubTrivyDbOrasPusher(); | ||||||
|  |         var exporter = new TrivyDbFeedExporter( | ||||||
|  |             advisoryStore, | ||||||
|  |             new VulnListJsonExportPathResolver(), | ||||||
|  |             options, | ||||||
|  |             packageBuilder, | ||||||
|  |             ociWriter, | ||||||
|  |             stateManager, | ||||||
|  |             planner, | ||||||
|  |             recordingBuilder, | ||||||
|  |             orasPusher, | ||||||
|  |             NullLogger<TrivyDbFeedExporter>.Instance, | ||||||
|  |             timeProvider); | ||||||
|  |  | ||||||
|  |         using var provider = new ServiceCollection().BuildServiceProvider(); | ||||||
|  |         await exporter.ExportAsync(provider, CancellationToken.None); | ||||||
|  |  | ||||||
|  |         var paths = recordingBuilder.LastRelativePaths; | ||||||
|  |         Assert.NotNull(paths); | ||||||
|  |  | ||||||
|  |         var sorted = paths!.OrderBy(static p => p, StringComparer.Ordinal).ToArray(); | ||||||
|  |         Assert.Equal(sorted, paths); | ||||||
|  |  | ||||||
|  |         advisoryStore.SetAdvisories(advisoryA, advisoryB); | ||||||
|  |         timeProvider.Advance(TimeSpan.FromMinutes(7)); | ||||||
|  |         await exporter.ExportAsync(provider, CancellationToken.None); | ||||||
|  |  | ||||||
|  |         var record = await stateStore.FindAsync(TrivyDbFeedExporter.ExporterId, CancellationToken.None); | ||||||
|  |         Assert.NotNull(record); | ||||||
|  |         Assert.Equal("20240920T000000Z", record!.BaseExportId); | ||||||
|  |         Assert.Single(recordingBuilder.ManifestDigests); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     [Fact] | ||||||
|  |     public async Task ExportAsync_SmallDatasetProducesDeterministicOciLayout() | ||||||
|  |     { | ||||||
|  |         var advisories = new[] | ||||||
|  |         { | ||||||
|  |             CreateSampleAdvisory("CVE-2024-3000", "Demo advisory 1"), | ||||||
|  |             CreateSampleAdvisory("CVE-2024-3001", "Demo advisory 2"), | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         var run1 = await RunDeterministicExportAsync(advisories); | ||||||
|  |         var run2 = await RunDeterministicExportAsync(advisories); | ||||||
|  |  | ||||||
|  |         Assert.Equal(run1.ManifestDigest, run2.ManifestDigest); | ||||||
|  |         Assert.Equal(run1.IndexJson, run2.IndexJson); | ||||||
|  |         Assert.Equal(run1.MetadataJson, run2.MetadataJson); | ||||||
|  |         Assert.Equal(run1.ManifestJson, run2.ManifestJson); | ||||||
|  |  | ||||||
|  |         var digests1 = run1.Blobs.Keys.OrderBy(static d => d, StringComparer.Ordinal).ToArray(); | ||||||
|  |         var digests2 = run2.Blobs.Keys.OrderBy(static d => d, StringComparer.Ordinal).ToArray(); | ||||||
|  |         Assert.Equal(digests1, digests2); | ||||||
|  |  | ||||||
|  |         foreach (var digest in digests1) | ||||||
|  |         { | ||||||
|  |             Assert.True(run2.Blobs.TryGetValue(digest, out var other), $"Missing digest {digest} in second run"); | ||||||
|  |             Assert.True(run1.Blobs[digest].SequenceEqual(other), $"Blob {digest} differs between runs"); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         using var metadataDoc = JsonDocument.Parse(run1.MetadataJson); | ||||||
|  |         Assert.Equal(2, metadataDoc.RootElement.GetProperty("advisoryCount").GetInt32()); | ||||||
|  |  | ||||||
|  |         using var manifestDoc = JsonDocument.Parse(run1.ManifestJson); | ||||||
|  |         Assert.Equal(TrivyDbMediaTypes.TrivyConfig, manifestDoc.RootElement.GetProperty("config").GetProperty("mediaType").GetString()); | ||||||
|  |         var layer = manifestDoc.RootElement.GetProperty("layers")[0]; | ||||||
|  |         Assert.Equal(TrivyDbMediaTypes.TrivyLayer, layer.GetProperty("mediaType").GetString()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     [Fact] | ||||||
|  |     public void ExportOptions_GetExportRoot_NormalizesRelativeRoot() | ||||||
|  |     { | ||||||
|  |         var options = new TrivyDbExportOptions | ||||||
|  |         { | ||||||
|  |             OutputRoot = Path.Combine("..", "exports", "trivy-test"), | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         var exportId = "20240901T000000Z"; | ||||||
|  |         var path = options.GetExportRoot(exportId); | ||||||
|  |  | ||||||
|  |         Assert.True(Path.IsPathRooted(path)); | ||||||
|  |         Assert.EndsWith(Path.Combine("exports", "trivy-test", exportId), path, StringComparison.Ordinal); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     [Fact] | ||||||
|  |     public async Task ExportAsync_PersistsStateAndSkipsWhenDigestUnchanged() | ||||||
|  |     { | ||||||
|  |         var advisory = CreateSampleAdvisory(); | ||||||
|  |         var advisoryStore = new StubAdvisoryStore(advisory); | ||||||
|  |  | ||||||
|  |         var optionsValue = new TrivyDbExportOptions | ||||||
|  |         { | ||||||
|  |             OutputRoot = _root, | ||||||
|  |             ReferencePrefix = "example/trivy", | ||||||
|  |             Json = new JsonExportOptions | ||||||
|  |             { | ||||||
|  |                 OutputRoot = _jsonRoot, | ||||||
|  |                 MaintainLatestSymlink = false, | ||||||
|  |             }, | ||||||
|  |             KeepWorkingTree = false, | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         var options = Options.Create(optionsValue); | ||||||
|  |         var packageBuilder = new TrivyDbPackageBuilder(); | ||||||
|  |         var ociWriter = new TrivyDbOciWriter(); | ||||||
|  |         var planner = new TrivyDbExportPlanner(); | ||||||
|  |         var stateStore = new InMemoryExportStateStore(); | ||||||
|  |         var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-09-01T00:00:00Z", CultureInfo.InvariantCulture)); | ||||||
|  |         var stateManager = new ExportStateManager(stateStore, timeProvider); | ||||||
|  |         var builderMetadata = JsonSerializer.SerializeToUtf8Bytes(new | ||||||
|  |         { | ||||||
|  |             Version = 2, | ||||||
|  |             NextUpdate = "2024-09-02T00:00:00Z", | ||||||
|  |             UpdatedAt = "2024-09-01T00:00:00Z", | ||||||
|  |         }); | ||||||
|  |         var builder = new StubTrivyDbBuilder(_root, builderMetadata); | ||||||
|  |         var orasPusher = new StubTrivyDbOrasPusher(); | ||||||
|  |         var exporter = new TrivyDbFeedExporter( | ||||||
|  |             advisoryStore, | ||||||
|  |             new VulnListJsonExportPathResolver(), | ||||||
|  |             options, | ||||||
|  |             packageBuilder, | ||||||
|  |             ociWriter, | ||||||
|  |             stateManager, | ||||||
|  |             planner, | ||||||
|  |             builder, | ||||||
|  |             orasPusher, | ||||||
|  |             NullLogger<TrivyDbFeedExporter>.Instance, | ||||||
|  |             timeProvider); | ||||||
|  |  | ||||||
|  |         using var provider = new ServiceCollection().BuildServiceProvider(); | ||||||
|  |         await exporter.ExportAsync(provider, CancellationToken.None); | ||||||
|  |  | ||||||
|  |         var record = await stateStore.FindAsync(TrivyDbFeedExporter.ExporterId, CancellationToken.None); | ||||||
|  |         Assert.NotNull(record); | ||||||
|  |         Assert.Equal("20240901T000000Z", record!.BaseExportId); | ||||||
|  |         Assert.False(string.IsNullOrEmpty(record.ExportCursor)); | ||||||
|  |  | ||||||
|  |         var baseExportId = record.BaseExportId ?? string.Empty; | ||||||
|  |         Assert.False(string.IsNullOrEmpty(baseExportId)); | ||||||
|  |         var firstExportDirectory = Path.Combine(_root, baseExportId); | ||||||
|  |         Assert.True(Directory.Exists(firstExportDirectory)); | ||||||
|  |  | ||||||
|  |         timeProvider.Advance(TimeSpan.FromMinutes(5)); | ||||||
|  |         await exporter.ExportAsync(provider, CancellationToken.None); | ||||||
|  |  | ||||||
|  |         var updatedRecord = await stateStore.FindAsync(TrivyDbFeedExporter.ExporterId, CancellationToken.None); | ||||||
|  |         Assert.NotNull(updatedRecord); | ||||||
|  |         Assert.Equal(record.UpdatedAt, updatedRecord!.UpdatedAt); | ||||||
|  |         Assert.Equal(record.LastFullDigest, updatedRecord.LastFullDigest); | ||||||
|  |  | ||||||
|  |         var skippedExportDirectory = Path.Combine(_root, "20240901T000500Z"); | ||||||
|  |         Assert.False(Directory.Exists(skippedExportDirectory)); | ||||||
|  |  | ||||||
|  |         Assert.Empty(orasPusher.Pushes); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     [Fact] | ||||||
|  |     public async Task ExportAsync_CreatesOfflineBundle() | ||||||
|  |     { | ||||||
|  |         var advisory = CreateSampleAdvisory(); | ||||||
|  |         var advisoryStore = new StubAdvisoryStore(advisory); | ||||||
|  |  | ||||||
|  |         var optionsValue = new TrivyDbExportOptions | ||||||
|  |         { | ||||||
|  |             OutputRoot = _root, | ||||||
|  |             ReferencePrefix = "example/trivy", | ||||||
|  |             Json = new JsonExportOptions | ||||||
|  |             { | ||||||
|  |                 OutputRoot = _jsonRoot, | ||||||
|  |                 MaintainLatestSymlink = false, | ||||||
|  |             }, | ||||||
|  |             KeepWorkingTree = false, | ||||||
|  |             OfflineBundle = new TrivyDbOfflineBundleOptions | ||||||
|  |             { | ||||||
|  |                 Enabled = true, | ||||||
|  |                 FileName = "{exportId}.bundle.tar.gz", | ||||||
|  |             }, | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         var options = Options.Create(optionsValue); | ||||||
|  |         var packageBuilder = new TrivyDbPackageBuilder(); | ||||||
|  |         var ociWriter = new TrivyDbOciWriter(); | ||||||
|  |         var planner = new TrivyDbExportPlanner(); | ||||||
|  |         var stateStore = new InMemoryExportStateStore(); | ||||||
|  |         var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-09-15T00:00:00Z", CultureInfo.InvariantCulture)); | ||||||
|  |         var stateManager = new ExportStateManager(stateStore, timeProvider); | ||||||
|  |         var builderMetadata = JsonSerializer.SerializeToUtf8Bytes(new | ||||||
|  |         { | ||||||
|  |             Version = 2, | ||||||
|  |             NextUpdate = "2024-09-16T00:00:00Z", | ||||||
|  |             UpdatedAt = "2024-09-15T00:00:00Z", | ||||||
|  |         }); | ||||||
|  |         var builder = new StubTrivyDbBuilder(_root, builderMetadata); | ||||||
|  |         var orasPusher = new StubTrivyDbOrasPusher(); | ||||||
|  |         var exporter = new TrivyDbFeedExporter( | ||||||
|  |             advisoryStore, | ||||||
|  |             new VulnListJsonExportPathResolver(), | ||||||
|  |             options, | ||||||
|  |             packageBuilder, | ||||||
|  |             ociWriter, | ||||||
|  |             stateManager, | ||||||
|  |             planner, | ||||||
|  |             builder, | ||||||
|  |             orasPusher, | ||||||
|  |             NullLogger<TrivyDbFeedExporter>.Instance, | ||||||
|  |             timeProvider); | ||||||
|  |  | ||||||
|  |         using var provider = new ServiceCollection().BuildServiceProvider(); | ||||||
|  |         await exporter.ExportAsync(provider, CancellationToken.None); | ||||||
|  |  | ||||||
|  |         var exportId = "20240915T000000Z"; | ||||||
|  |         var bundlePath = Path.Combine(_root, $"{exportId}.bundle.tar.gz"); | ||||||
|  |         Assert.True(File.Exists(bundlePath)); | ||||||
|  |         Assert.Empty(orasPusher.Pushes); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private static Advisory CreateSampleAdvisory( | ||||||
|  |         string advisoryKey = "CVE-2024-9999", | ||||||
|  |         string title = "Trivy Export Test") | ||||||
|  |     { | ||||||
|  |         return new Advisory( | ||||||
|  |             advisoryKey: advisoryKey, | ||||||
|  |             title: title, | ||||||
|  |             summary: null, | ||||||
|  |             language: "en", | ||||||
|  |             published: DateTimeOffset.Parse("2024-08-01T00:00:00Z", CultureInfo.InvariantCulture), | ||||||
|  |             modified: DateTimeOffset.Parse("2024-08-02T00:00:00Z", CultureInfo.InvariantCulture), | ||||||
|  |             severity: "medium", | ||||||
|  |             exploitKnown: false, | ||||||
|  |             aliases: new[] { "CVE-2024-9999" }, | ||||||
|  |             references: Array.Empty<AdvisoryReference>(), | ||||||
|  |             affectedPackages: Array.Empty<AffectedPackage>(), | ||||||
|  |             cvssMetrics: Array.Empty<CvssMetric>(), | ||||||
|  |             provenance: Array.Empty<AdvisoryProvenance>()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     public void Dispose() | ||||||
|  |     { | ||||||
|  |         try | ||||||
|  |         { | ||||||
|  |             if (Directory.Exists(_root)) | ||||||
|  |             { | ||||||
|  |                 Directory.Delete(_root, recursive: true); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         catch | ||||||
|  |         { | ||||||
|  |             // best effort cleanup | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class StubAdvisoryStore : IAdvisoryStore | ||||||
|  |     { | ||||||
|  |         private IReadOnlyList<Advisory> _advisories; | ||||||
|  |  | ||||||
|  |         public StubAdvisoryStore(params Advisory[] advisories) | ||||||
|  |         { | ||||||
|  |             _advisories = advisories; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public void SetAdvisories(params Advisory[] advisories) | ||||||
|  |         { | ||||||
|  |             _advisories = advisories; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken) | ||||||
|  |             => Task.FromResult(_advisories); | ||||||
|  |  | ||||||
|  |         public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken) | ||||||
|  |             => Task.FromResult<Advisory?>(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey)); | ||||||
|  |  | ||||||
|  |         public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) | ||||||
|  |             => Task.CompletedTask; | ||||||
|  |  | ||||||
|  |         public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken) | ||||||
|  |         { | ||||||
|  |             return EnumerateAsync(cancellationToken); | ||||||
|  |  | ||||||
|  |             async IAsyncEnumerable<Advisory> EnumerateAsync([EnumeratorCancellation] CancellationToken ct) | ||||||
|  |             { | ||||||
|  |                 foreach (var advisory in _advisories) | ||||||
|  |                 { | ||||||
|  |                     ct.ThrowIfCancellationRequested(); | ||||||
|  |                     yield return advisory; | ||||||
|  |                     await Task.Yield(); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class InMemoryExportStateStore : IExportStateStore | ||||||
|  |     { | ||||||
|  |         private ExportStateRecord? _record; | ||||||
|  |  | ||||||
|  |         public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken) | ||||||
|  |             => Task.FromResult(_record); | ||||||
|  |  | ||||||
|  |         public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken) | ||||||
|  |         { | ||||||
|  |             _record = record; | ||||||
|  |             return Task.FromResult(record); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class TestTimeProvider : TimeProvider | ||||||
|  |     { | ||||||
|  |         private DateTimeOffset _now; | ||||||
|  |  | ||||||
|  |         public TestTimeProvider(DateTimeOffset start) => _now = start; | ||||||
|  |  | ||||||
|  |         public override DateTimeOffset GetUtcNow() => _now; | ||||||
|  |  | ||||||
|  |         public void Advance(TimeSpan delta) => _now = _now.Add(delta); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class StubTrivyDbBuilder : ITrivyDbBuilder | ||||||
|  |     { | ||||||
|  |         private readonly string _root; | ||||||
|  |         private readonly byte[] _metadata; | ||||||
|  |  | ||||||
|  |         public StubTrivyDbBuilder(string root, byte[] metadata) | ||||||
|  |         { | ||||||
|  |             _root = root; | ||||||
|  |             _metadata = metadata; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public Task<TrivyDbBuilderResult> BuildAsync( | ||||||
|  |             JsonExportResult jsonTree, | ||||||
|  |             DateTimeOffset exportedAt, | ||||||
|  |             string exportId, | ||||||
|  |             CancellationToken cancellationToken) | ||||||
|  |         { | ||||||
|  |             var workingDirectory = Directory.CreateDirectory(Path.Combine(_root, $"builder-{exportId}")).FullName; | ||||||
|  |             var archivePath = Path.Combine(workingDirectory, "db.tar.gz"); | ||||||
|  |             var payload = new byte[] { 0x1, 0x2, 0x3, 0x4 }; | ||||||
|  |             File.WriteAllBytes(archivePath, payload); | ||||||
|  |             using var sha256 = SHA256.Create(); | ||||||
|  |             var digest = "sha256:" + Convert.ToHexString(sha256.ComputeHash(payload)).ToLowerInvariant(); | ||||||
|  |             var length = payload.Length; | ||||||
|  |  | ||||||
|  |             return Task.FromResult(new TrivyDbBuilderResult( | ||||||
|  |                 archivePath, | ||||||
|  |                 digest, | ||||||
|  |                 length, | ||||||
|  |                 _metadata, | ||||||
|  |                 workingDirectory)); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class RecordingTrivyDbBuilder : ITrivyDbBuilder | ||||||
|  |     { | ||||||
|  |         private readonly string _root; | ||||||
|  |         private readonly byte[] _metadata; | ||||||
|  |         private readonly List<string> _manifestDigests = new(); | ||||||
|  |  | ||||||
|  |         public RecordingTrivyDbBuilder(string root, byte[] metadata) | ||||||
|  |         { | ||||||
|  |             _root = root; | ||||||
|  |             _metadata = metadata; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public IReadOnlyList<string> ManifestDigests => _manifestDigests; | ||||||
|  |         public string[]? LastRelativePaths { get; private set; } | ||||||
|  |  | ||||||
|  |         public Task<TrivyDbBuilderResult> BuildAsync( | ||||||
|  |             JsonExportResult jsonTree, | ||||||
|  |             DateTimeOffset exportedAt, | ||||||
|  |             string exportId, | ||||||
|  |             CancellationToken cancellationToken) | ||||||
|  |         { | ||||||
|  |             LastRelativePaths = jsonTree.Files.Select(static file => file.RelativePath).ToArray(); | ||||||
|  |  | ||||||
|  |             var workingDirectory = Directory.CreateDirectory(Path.Combine(_root, $"builder-{exportId}")).FullName; | ||||||
|  |             var archivePath = Path.Combine(workingDirectory, "db.tar.gz"); | ||||||
|  |             var payload = new byte[] { 0x5, 0x6, 0x7, 0x8 }; | ||||||
|  |             File.WriteAllBytes(archivePath, payload); | ||||||
|  |             using var sha256 = SHA256.Create(); | ||||||
|  |             var digest = "sha256:" + Convert.ToHexString(sha256.ComputeHash(payload)).ToLowerInvariant(); | ||||||
|  |             _manifestDigests.Add(digest); | ||||||
|  |  | ||||||
|  |             return Task.FromResult(new TrivyDbBuilderResult( | ||||||
|  |                 archivePath, | ||||||
|  |                 digest, | ||||||
|  |                 payload.Length, | ||||||
|  |                 _metadata, | ||||||
|  |                 workingDirectory)); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed record RunArtifacts( | ||||||
|  |         string ExportId, | ||||||
|  |         string ManifestDigest, | ||||||
|  |         string IndexJson, | ||||||
|  |         string MetadataJson, | ||||||
|  |         string ManifestJson, | ||||||
|  |         IReadOnlyDictionary<string, byte[]> Blobs); | ||||||
|  |  | ||||||
|  |     private async Task<RunArtifacts> RunDeterministicExportAsync(IReadOnlyList<Advisory> advisories) | ||||||
|  |     { | ||||||
|  |         var workspace = Path.Combine(_root, $"deterministic-{Guid.NewGuid():N}"); | ||||||
|  |         var jsonRoot = Path.Combine(workspace, "tree"); | ||||||
|  |         Directory.CreateDirectory(workspace); | ||||||
|  |  | ||||||
|  |         var advisoryStore = new StubAdvisoryStore(advisories.ToArray()); | ||||||
|  |  | ||||||
|  |         var optionsValue = new TrivyDbExportOptions | ||||||
|  |         { | ||||||
|  |             OutputRoot = workspace, | ||||||
|  |             ReferencePrefix = "example/trivy", | ||||||
|  |             KeepWorkingTree = true, | ||||||
|  |             Json = new JsonExportOptions | ||||||
|  |             { | ||||||
|  |                 OutputRoot = jsonRoot, | ||||||
|  |                 MaintainLatestSymlink = false, | ||||||
|  |             }, | ||||||
|  |         }; | ||||||
|  |  | ||||||
|  |         var exportedAt = DateTimeOffset.Parse("2024-10-01T00:00:00Z", CultureInfo.InvariantCulture); | ||||||
|  |         var options = Options.Create(optionsValue); | ||||||
|  |         var packageBuilder = new TrivyDbPackageBuilder(); | ||||||
|  |         var ociWriter = new TrivyDbOciWriter(); | ||||||
|  |         var planner = new TrivyDbExportPlanner(); | ||||||
|  |         var stateStore = new InMemoryExportStateStore(); | ||||||
|  |         var timeProvider = new TestTimeProvider(exportedAt); | ||||||
|  |         var stateManager = new ExportStateManager(stateStore, timeProvider); | ||||||
|  |         var builderMetadata = JsonSerializer.SerializeToUtf8Bytes(new | ||||||
|  |         { | ||||||
|  |             Version = 2, | ||||||
|  |             NextUpdate = "2024-10-02T00:00:00Z", | ||||||
|  |             UpdatedAt = "2024-10-01T00:00:00Z", | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         var builder = new DeterministicTrivyDbBuilder(workspace, builderMetadata); | ||||||
|  |         var orasPusher = new StubTrivyDbOrasPusher(); | ||||||
|  |         var exporter = new TrivyDbFeedExporter( | ||||||
|  |             advisoryStore, | ||||||
|  |             new VulnListJsonExportPathResolver(), | ||||||
|  |             options, | ||||||
|  |             packageBuilder, | ||||||
|  |             ociWriter, | ||||||
|  |             stateManager, | ||||||
|  |             planner, | ||||||
|  |             builder, | ||||||
|  |             orasPusher, | ||||||
|  |             NullLogger<TrivyDbFeedExporter>.Instance, | ||||||
|  |             timeProvider); | ||||||
|  |  | ||||||
|  |         using var provider = new ServiceCollection().BuildServiceProvider(); | ||||||
|  |         await exporter.ExportAsync(provider, CancellationToken.None); | ||||||
|  |  | ||||||
|  |         var exportId = exportedAt.ToString(optionsValue.TagFormat, CultureInfo.InvariantCulture); | ||||||
|  |         var layoutPath = Path.Combine(workspace, exportId); | ||||||
|  |  | ||||||
|  |         var indexJson = await File.ReadAllTextAsync(Path.Combine(layoutPath, "index.json"), Encoding.UTF8); | ||||||
|  |         var metadataJson = await File.ReadAllTextAsync(Path.Combine(layoutPath, "metadata.json"), Encoding.UTF8); | ||||||
|  |  | ||||||
|  |         using var indexDoc = JsonDocument.Parse(indexJson); | ||||||
|  |         var manifestNode = indexDoc.RootElement.GetProperty("manifests")[0]; | ||||||
|  |         var manifestDigest = manifestNode.GetProperty("digest").GetString()!; | ||||||
|  |  | ||||||
|  |         var manifestHex = manifestDigest[7..]; | ||||||
|  |         var manifestJson = await File.ReadAllTextAsync(Path.Combine(layoutPath, "blobs", "sha256", manifestHex), Encoding.UTF8); | ||||||
|  |  | ||||||
|  |         var blobs = new Dictionary<string, byte[]>(StringComparer.Ordinal); | ||||||
|  |         var blobsRoot = Path.Combine(layoutPath, "blobs", "sha256"); | ||||||
|  |         foreach (var file in Directory.GetFiles(blobsRoot)) | ||||||
|  |         { | ||||||
|  |             var name = Path.GetFileName(file); | ||||||
|  |             var content = await File.ReadAllBytesAsync(file); | ||||||
|  |             blobs[name] = content; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         Directory.Delete(workspace, recursive: true); | ||||||
|  |  | ||||||
|  |         return new RunArtifacts(exportId, manifestDigest, indexJson, metadataJson, manifestJson, blobs); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class DeterministicTrivyDbBuilder : ITrivyDbBuilder | ||||||
|  |     { | ||||||
|  |         private readonly string _root; | ||||||
|  |         private readonly byte[] _metadata; | ||||||
|  |         private readonly byte[] _payload; | ||||||
|  |  | ||||||
|  |         public DeterministicTrivyDbBuilder(string root, byte[] metadata) | ||||||
|  |         { | ||||||
|  |             _root = root; | ||||||
|  |             _metadata = metadata; | ||||||
|  |             _payload = new byte[] { 0x21, 0x22, 0x23, 0x24, 0x25 }; | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         public Task<TrivyDbBuilderResult> BuildAsync( | ||||||
|  |             JsonExportResult jsonTree, | ||||||
|  |             DateTimeOffset exportedAt, | ||||||
|  |             string exportId, | ||||||
|  |             CancellationToken cancellationToken) | ||||||
|  |         { | ||||||
|  |             var workingDirectory = Directory.CreateDirectory(Path.Combine(_root, $"builder-{exportId}")).FullName; | ||||||
|  |             var archivePath = Path.Combine(workingDirectory, "db.tar.gz"); | ||||||
|  |             File.WriteAllBytes(archivePath, _payload); | ||||||
|  |             using var sha256 = SHA256.Create(); | ||||||
|  |             var digest = "sha256:" + Convert.ToHexString(sha256.ComputeHash(_payload)).ToLowerInvariant(); | ||||||
|  |  | ||||||
|  |             return Task.FromResult(new TrivyDbBuilderResult( | ||||||
|  |                 archivePath, | ||||||
|  |                 digest, | ||||||
|  |                 _payload.Length, | ||||||
|  |                 _metadata, | ||||||
|  |                 workingDirectory)); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     private sealed class StubTrivyDbOrasPusher : ITrivyDbOrasPusher | ||||||
|  |     { | ||||||
|  |         public List<(string Layout, string Reference, string ExportId)> Pushes { get; } = new(); | ||||||
|  |  | ||||||
|  |         public Task PushAsync(string layoutPath, string reference, string exportId, CancellationToken cancellationToken) | ||||||
|  |         { | ||||||
|  |             Pushes.Add((layoutPath, reference, exportId)); | ||||||
|  |             return Task.CompletedTask; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -11,8 +11,8 @@ | |||||||
|     <ProjectReference Include="..\StellaOps.Feedser.Exporter.Json\StellaOps.Feedser.Exporter.Json.csproj" /> |     <ProjectReference Include="..\StellaOps.Feedser.Exporter.Json\StellaOps.Feedser.Exporter.Json.csproj" /> | ||||||
|     <ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" /> |     <ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" /> | ||||||
|     <ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" /> |     <ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" /> | ||||||
|     <ProjectReference Include="..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> |     <ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> | ||||||
|     <ProjectReference Include="..\..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj" /> |     <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> | ||||||
|   </ItemGroup> |   </ItemGroup> | ||||||
|   <ItemGroup> |   <ItemGroup> | ||||||
|     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" /> |     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" /> | ||||||
| @@ -6,8 +6,8 @@ | |||||||
| |Pack db.tar.gz + metadata.json|BE-Export|Exporters|DONE – Builder output re-packed with fixed timestamps and zeroed gzip mtime.| | |Pack db.tar.gz + metadata.json|BE-Export|Exporters|DONE – Builder output re-packed with fixed timestamps and zeroed gzip mtime.| | ||||||
| |ORAS push support|BE-Export|Exporters|DONE – Optional `TrivyDbOrasPusher` shells `oras cp --from-oci-layout` with configurable args/env.| | |ORAS push support|BE-Export|Exporters|DONE – Optional `TrivyDbOrasPusher` shells `oras cp --from-oci-layout` with configurable args/env.| | ||||||
| |Offline bundle toggle|BE-Export|Exporters|DONE – Deterministic OCI layout bundle emitted when enabled.| | |Offline bundle toggle|BE-Export|Exporters|DONE – Deterministic OCI layout bundle emitted when enabled.| | ||||||
| |Deterministic ordering of advisories|BE-Export|Models|TODO – Sort by advisoryKey; stable array orders.| | |Deterministic ordering of advisories|BE-Export|Models|DONE – exporter now loads advisories, sorts by advisoryKey, and emits sorted JSON trees with deterministic OCI payloads.| | ||||||
| |End-to-end tests with small dataset|QA|Exporters|TODO – Assert media types and reproducible digests across runs.| | |End-to-end tests with small dataset|QA|Exporters|DONE – added deterministic round-trip test covering OCI layout, media types, and digest stability w/ repeated inputs.| | ||||||
| |ExportState persistence & idempotence|BE-Export|Storage.Mongo|DOING – `ExportStateManager` keeps stable base export metadata; delta reset remains pending.| | |ExportState persistence & idempotence|BE-Export|Storage.Mongo|DOING – `ExportStateManager` keeps stable base export metadata; delta reset remains pending.| | ||||||
| |Streamed package building to avoid large copies|BE-Export|Exporters|TODO – refactor package writer to stream without double-buffering metadata/archive payloads.| | |Streamed package building to avoid large copies|BE-Export|Exporters|DONE – metadata/config now reuse backing arrays and OCI writer streams directly without double buffering.| | ||||||
| |Plan incremental/delta exports|BE-Export|Exporters|TODO – design reuse of existing blobs/layers when inputs unchanged instead of rewriting full trees each run.| | |Plan incremental/delta exports|BE-Export|Exporters|TODO – design reuse of existing blobs/layers when inputs unchanged instead of rewriting full trees each run.| | ||||||
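The "Pack db.tar.gz + metadata.json" row above relies on two determinism levers: pinning tar entry timestamps and a gzip header without a real MTIME (which .NET's GZipStream writes as zero). A standalone sketch of that packing pattern, with illustrative names rather than the exporter's actual packer:

    using System;
    using System.Formats.Tar;
    using System.IO;
    using System.IO.Compression;

    static void PackDeterministic(string sourceFile, string entryName, string outputPath, DateTimeOffset fixedTime)
    {
        using var data = File.OpenRead(sourceFile);
        using var output = File.Create(outputPath);
        using var gzip = new GZipStream(output, CompressionLevel.SmallestSize); // gzip MTIME field is written as 0
        using var tar = new TarWriter(gzip);
        var entry = new PaxTarEntry(TarEntryType.RegularFile, entryName)
        {
            ModificationTime = fixedTime, // pinned, so byte-identical archives across runs
            DataStream = data,
        };
        tar.WriteEntry(entry);
    }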
| @@ -1,5 +1,6 @@ | |||||||
| using System; | using System; | ||||||
| using System.IO; | using System.IO; | ||||||
|  | using System.Runtime.InteropServices; | ||||||
| using System.Threading; | using System.Threading; | ||||||
| using System.Threading.Tasks; | using System.Threading.Tasks; | ||||||
|  |  | ||||||
| @@ -32,9 +33,12 @@ public sealed class TrivyDbBlob | |||||||
|             return new TrivyDbBlob(static _ => ValueTask.FromResult<Stream>(Stream.Null), 0); |             return new TrivyDbBlob(static _ => ValueTask.FromResult<Stream>(Stream.Null), 0); | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         return new TrivyDbBlob( |         if (MemoryMarshal.TryGetArray(payload, out ArraySegment<byte> segment) && segment.Array is not null && segment.Offset == 0 && segment.Count == segment.Array.Length) | ||||||
|             cancellationToken => ValueTask.FromResult<Stream>(new MemoryStream(payload.ToArray(), writable: false)), |         { | ||||||
|             payload.Length); |             return FromArray(segment.Array); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return FromArray(payload.ToArray()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public static TrivyDbBlob FromFile(string path, long length) |     public static TrivyDbBlob FromFile(string path, long length) | ||||||
| @@ -59,4 +63,16 @@ public sealed class TrivyDbBlob | |||||||
|                 options: FileOptions.Asynchronous | FileOptions.SequentialScan)), |                 options: FileOptions.Asynchronous | FileOptions.SequentialScan)), | ||||||
|             length); |             length); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     public static TrivyDbBlob FromArray(byte[] buffer) | ||||||
|  |     { | ||||||
|  |         if (buffer is null) | ||||||
|  |         { | ||||||
|  |             throw new ArgumentNullException(nameof(buffer)); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return new TrivyDbBlob( | ||||||
|  |             _ => ValueTask.FromResult<Stream>(new MemoryStream(buffer, writable: false)), | ||||||
|  |             buffer.LongLength); | ||||||
|  |     } | ||||||
| } | } | ||||||
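Context for the fast path above: MemoryMarshal.TryGetArray only makes the backing array safe to hand to FromArray when the memory covers the array exactly, which is what the offset/length guard enforces. A small illustration:

    using System;
    using System.Runtime.InteropServices;

    ReadOnlyMemory<byte> whole = new byte[] { 1, 2, 3, 4 };
    ReadOnlyMemory<byte> slice = whole.Slice(1, 2);

    MemoryMarshal.TryGetArray(whole, out ArraySegment<byte> a);
    // a.Offset == 0 and a.Count == 4: wrapping a.Array exposes exactly the payload.

    MemoryMarshal.TryGetArray(slice, out ArraySegment<byte> b);
    // b.Offset == 1 and b.Count == 2: wrapping b.Array would leak bytes outside the
    // slice, so the exporter falls back to copying via payload.ToArray().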
| @@ -1,4 +1,5 @@ | |||||||
| using System; | using System; | ||||||
|  | using System.Collections.Generic; | ||||||
| using System.Globalization; | using System.Globalization; | ||||||
| using System.IO; | using System.IO; | ||||||
| using System.IO.Compression; | using System.IO.Compression; | ||||||
| @@ -12,6 +13,7 @@ using System.Formats.Tar; | |||||||
| using Microsoft.Extensions.Logging; | using Microsoft.Extensions.Logging; | ||||||
| using Microsoft.Extensions.Options; | using Microsoft.Extensions.Options; | ||||||
| using StellaOps.Feedser.Exporter.Json; | using StellaOps.Feedser.Exporter.Json; | ||||||
|  | using StellaOps.Feedser.Models; | ||||||
| using StellaOps.Feedser.Storage.Mongo.Advisories; | using StellaOps.Feedser.Storage.Mongo.Advisories; | ||||||
| using StellaOps.Feedser.Storage.Mongo.Exporting; | using StellaOps.Feedser.Storage.Mongo.Exporting; | ||||||
| using StellaOps.Plugin; | using StellaOps.Plugin; | ||||||
| @@ -74,8 +76,8 @@ public sealed class TrivyDbFeedExporter : IFeedExporter | |||||||
|         _logger.LogInformation("Starting Trivy DB export {ExportId}", exportId); |         _logger.LogInformation("Starting Trivy DB export {ExportId}", exportId); | ||||||
|  |  | ||||||
|         var jsonBuilder = new JsonExportSnapshotBuilder(_options.Json, _pathResolver); |         var jsonBuilder = new JsonExportSnapshotBuilder(_options.Json, _pathResolver); | ||||||
|         var advisoryStream = _advisoryStore.StreamAsync(cancellationToken); |         var advisories = await LoadAdvisoriesAsync(cancellationToken).ConfigureAwait(false); | ||||||
|         var jsonResult = await jsonBuilder.WriteAsync(advisoryStream, exportedAt, exportId, cancellationToken).ConfigureAwait(false); |         var jsonResult = await jsonBuilder.WriteAsync(advisories, exportedAt, exportId, cancellationToken).ConfigureAwait(false); | ||||||
|  |  | ||||||
|         _logger.LogInformation( |         _logger.LogInformation( | ||||||
|             "Prepared Trivy JSON tree {ExportId} with {AdvisoryCount} advisories ({Bytes} bytes)", |             "Prepared Trivy JSON tree {ExportId} with {AdvisoryCount} advisories ({Bytes} bytes)", | ||||||
| @@ -150,6 +152,23 @@ public sealed class TrivyDbFeedExporter : IFeedExporter | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     private async Task<IReadOnlyList<Advisory>> LoadAdvisoriesAsync(CancellationToken cancellationToken) | ||||||
|  |     { | ||||||
|  |         var advisories = new List<Advisory>(); | ||||||
|  |         await foreach (var advisory in _advisoryStore.StreamAsync(cancellationToken).ConfigureAwait(false)) | ||||||
|  |         { | ||||||
|  |             if (advisory is null) | ||||||
|  |             { | ||||||
|  |                 continue; | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             advisories.Add(advisory); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         advisories.Sort(static (left, right) => string.CompareOrdinal(left.AdvisoryKey, right.AdvisoryKey)); | ||||||
|  |         return advisories; | ||||||
|  |     } | ||||||
|  |  | ||||||
|     private byte[] CreateMetadataJson( |     private byte[] CreateMetadataJson( | ||||||
|         ReadOnlyMemory<byte> builderMetadata, |         ReadOnlyMemory<byte> builderMetadata, | ||||||
|         string treeDigest, |         string treeDigest, | ||||||
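Why LoadAdvisoriesAsync sorts with string.CompareOrdinal: culture-aware comparison can order the same keys differently per machine or locale, which would break reproducible exports; ordinal comparison is byte-wise and host-independent. For example:

    using System;

    var keys = new[] { "CVE-2024-2", "CVE-2024-10" };
    Array.Sort(keys, StringComparer.Ordinal);
    // Always ["CVE-2024-10", "CVE-2024-2"]: '1' (0x31) sorts before '2' (0x32) byte-wise.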
| @@ -53,7 +53,7 @@ public sealed class TrivyDbOciWriter | |||||||
|         Directory.CreateDirectory(root); |         Directory.CreateDirectory(root); | ||||||
|         var timestamp = package.Config.GeneratedAt.UtcDateTime; |         var timestamp = package.Config.GeneratedAt.UtcDateTime; | ||||||
|  |  | ||||||
|         await WriteFileAsync(Path.Combine(root, "metadata.json"), package.MetadataJson.ToArray(), timestamp, cancellationToken).ConfigureAwait(false); |         await WriteFileAsync(Path.Combine(root, "metadata.json"), package.MetadataJson, timestamp, cancellationToken).ConfigureAwait(false); | ||||||
|         await WriteFileAsync(Path.Combine(root, "oci-layout"), OciLayoutBytes, timestamp, cancellationToken).ConfigureAwait(false); |         await WriteFileAsync(Path.Combine(root, "oci-layout"), OciLayoutBytes, timestamp, cancellationToken).ConfigureAwait(false); | ||||||
|  |  | ||||||
|         var blobsRoot = Path.Combine(root, "blobs", "sha256"); |         var blobsRoot = Path.Combine(root, "blobs", "sha256"); | ||||||
| @@ -96,7 +96,7 @@ public sealed class TrivyDbOciWriter | |||||||
|         return new TrivyDbOciWriteResult(root, manifestDigest, blobDigests); |         return new TrivyDbOciWriteResult(root, manifestDigest, blobDigests); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     private static async Task WriteFileAsync(string path, byte[] bytes, DateTime utcTimestamp, CancellationToken cancellationToken) |     private static async Task WriteFileAsync(string path, ReadOnlyMemory<byte> bytes, DateTime utcTimestamp, CancellationToken cancellationToken) | ||||||
|     { |     { | ||||||
|         var directory = Path.GetDirectoryName(path); |         var directory = Path.GetDirectoryName(path); | ||||||
|         if (!string.IsNullOrEmpty(directory)) |         if (!string.IsNullOrEmpty(directory)) | ||||||
| @@ -105,7 +105,15 @@ public sealed class TrivyDbOciWriter | |||||||
|             Directory.SetLastWriteTimeUtc(directory, utcTimestamp); |             Directory.SetLastWriteTimeUtc(directory, utcTimestamp); | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         await File.WriteAllBytesAsync(path, bytes, cancellationToken).ConfigureAwait(false); |         await using var destination = new FileStream( | ||||||
|  |             path, | ||||||
|  |             FileMode.Create, | ||||||
|  |             FileAccess.Write, | ||||||
|  |             FileShare.None, | ||||||
|  |             bufferSize: 81920, | ||||||
|  |             options: FileOptions.Asynchronous | FileOptions.SequentialScan); | ||||||
|  |         await destination.WriteAsync(bytes, cancellationToken).ConfigureAwait(false); | ||||||
|  |         await destination.FlushAsync(cancellationToken).ConfigureAwait(false); | ||||||
|         File.SetLastWriteTimeUtc(path, utcTimestamp); |         File.SetLastWriteTimeUtc(path, utcTimestamp); | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -85,7 +85,7 @@ public sealed class TrivyDbPackageBuilder | |||||||
|             configDescriptor, |             configDescriptor, | ||||||
|             ImmutableArray.Create(layerDescriptor)); |             ImmutableArray.Create(layerDescriptor)); | ||||||
|  |  | ||||||
|         var blobs = new Dictionary<string, TrivyDbBlob>(StringComparer.Ordinal) |         var blobs = new SortedDictionary<string, TrivyDbBlob>(StringComparer.Ordinal) | ||||||
|         { |         { | ||||||
|             [configDigest] = TrivyDbBlob.FromBytes(configBytes), |             [configDigest] = TrivyDbBlob.FromBytes(configBytes), | ||||||
|             [request.DatabaseDigest] = TrivyDbBlob.FromFile(request.DatabaseArchivePath, request.DatabaseLength), |             [request.DatabaseDigest] = TrivyDbBlob.FromFile(request.DatabaseArchivePath, request.DatabaseLength), | ||||||
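The Dictionary-to-SortedDictionary switch above pins blob enumeration (and therefore write) order to ordinal digest order, one of the ingredients behind the digest-stability test earlier in this commit. A tiny standalone illustration:

    using System;
    using System.Collections.Generic;

    var blobs = new SortedDictionary<string, string>(StringComparer.Ordinal)
    {
        ["sha256:bbbb"] = "layer",   // inserted first
        ["sha256:aaaa"] = "config",  // inserted second
    };

    foreach (var (digest, kind) in blobs)
    {
        Console.WriteLine($"{digest} -> {kind}"); // "sha256:aaaa" always enumerates first
    }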
Some files were not shown because too many files have changed in this diff.