Add comprehensive security tests for OWASP A02, A05, A07, and A08 categories

- Implemented tests for Cryptographic Failures (A02) to ensure proper handling of sensitive data, secure algorithms, and key management.
- Added tests for Security Misconfiguration (A05) to validate production configurations, security headers, CORS settings, and feature management (a minimal sketch of this test shape follows the list).
- Developed tests for Authentication Failures (A07) to enforce strong password policies, rate limiting, session management, and MFA support.
- Created tests for Software and Data Integrity Failures (A08) to verify artifact signatures, SBOM integrity, attestation chains, and feed updates.
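For illustration only, a minimal sketch of the shape an A05 configuration test can take. xUnit is assumed, and SecurityHeadersOptions with its defaults is a hypothetical stand-in, not one of the fixtures added in this commit.

using Xunit;

public sealed class SecurityMisconfigurationTests
{
    // Hypothetical options type standing in for the real production configuration.
    private sealed record SecurityHeadersOptions(
        bool HstsEnabled,
        string XContentTypeOptions,
        string[] AllowedCorsOrigins);

    private static SecurityHeadersOptions LoadProductionDefaults() => new(
        HstsEnabled: true,
        XContentTypeOptions: "nosniff",
        AllowedCorsOrigins: new[] { "https://app.example.test" });

    [Fact]
    public void Production_defaults_enable_hsts_and_nosniff()
    {
        var options = LoadProductionDefaults();

        Assert.True(options.HstsEnabled);
        Assert.Equal("nosniff", options.XContentTypeOptions);
    }

    [Fact]
    public void Cors_policy_rejects_wildcard_origins()
    {
        var options = LoadProductionDefaults();

        Assert.DoesNotContain("*", options.AllowedCorsOrigins);
    }
}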
Branch: master
Date: 2025-12-16 16:40:19 +02:00
Parent: 415eff1207
Commit: 2170a58734
206 changed files with 30547 additions and 534 deletions


@@ -0,0 +1,336 @@
// =============================================================================
// CycloneDxParser.cs
// CycloneDX SBOM parser implementation
// Part of Step 2: Evidence Collection (Task T5)
// =============================================================================
using System.Text.Json;
using System.Text.Json.Nodes;
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
/// <summary>
/// Parser for CycloneDX SBOM format (JSON).
/// Supports CycloneDX 1.4, 1.5, and 1.6 schemas.
/// </summary>
public sealed class CycloneDxParser : ISbomParser
{
// Lenient JsonDocument parse options (trailing commas, comments). Property
// lookups via TryGetProperty remain case-sensitive regardless of options.
private static readonly JsonDocumentOptions DocumentOptions = new()
{
    AllowTrailingCommas = true,
    CommentHandling = JsonCommentHandling.Skip
};
public SbomFormat DetectFormat(string filePath)
{
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
// CycloneDX files typically end with .cdx.json or .bom.json
if (filePath.EndsWith(".cdx.json", StringComparison.OrdinalIgnoreCase) ||
filePath.EndsWith(".bom.json", StringComparison.OrdinalIgnoreCase))
{
return SbomFormat.CycloneDx;
}
// Try to detect from content
if (File.Exists(filePath))
{
try
{
using var stream = File.OpenRead(filePath);
using var reader = new StreamReader(stream);
var firstChars = new char[1024];
var read = reader.Read(firstChars, 0, firstChars.Length);
var content = new string(firstChars, 0, read);
if (content.Contains("\"bomFormat\"", StringComparison.OrdinalIgnoreCase) ||
    (content.Contains("\"$schema\"", StringComparison.OrdinalIgnoreCase) &&
     content.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)))
{
return SbomFormat.CycloneDx;
}
}
catch
{
// Ignore detection errors
}
}
return SbomFormat.Unknown;
}
public async Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
if (!File.Exists(filePath))
{
return SbomParseResult.Failure($"File not found: {filePath}", SbomFormat.CycloneDx);
}
try
{
await using var stream = File.OpenRead(filePath);
return await ParseAsync(stream, SbomFormat.CycloneDx, cancellationToken);
}
catch (Exception ex)
{
return SbomParseResult.Failure($"Failed to parse CycloneDX file: {ex.Message}", SbomFormat.CycloneDx);
}
}
public async Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(stream);
try
{
using var document = await JsonDocument.ParseAsync(stream, DocumentOptions, cancellationToken);
var root = document.RootElement;
// Reject non-object roots up front so the TryGetProperty calls below cannot throw.
if (root.ValueKind != JsonValueKind.Object)
{
    return SbomParseResult.Failure("Not a valid CycloneDX document", SbomFormat.CycloneDx);
}
// Validate bomFormat ("CycloneDX"); fall back to the $schema URL if it is absent.
var hasBomFormat = root.TryGetProperty("bomFormat", out var bomFormatProp) &&
    string.Equals(bomFormatProp.GetString(), "CycloneDX", StringComparison.OrdinalIgnoreCase);
if (!hasBomFormat)
{
    var hasCycloneDxSchema = root.TryGetProperty("$schema", out var schemaProp) &&
        (schemaProp.GetString()?.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase) ?? false);
    if (!hasCycloneDxSchema)
    {
        return SbomParseResult.Failure("Not a valid CycloneDX document", SbomFormat.CycloneDx);
    }
}
// Extract spec version
string? specVersion = null;
if (root.TryGetProperty("specVersion", out var specProp))
{
specVersion = specProp.GetString();
}
// Extract serial number
string? serialNumber = null;
if (root.TryGetProperty("serialNumber", out var serialProp))
{
serialNumber = serialProp.GetString();
}
// Extract metadata-derived fields (timestamp, generator tools, primary component).
DateTimeOffset? createdAt = null;
string? generatorTool = null;
SbomSubject? primarySubject = null;
if (root.TryGetProperty("metadata", out var metadataProp) &&
    metadataProp.ValueKind == JsonValueKind.Object)
{
    if (metadataProp.TryGetProperty("timestamp", out var timestampProp) &&
        DateTimeOffset.TryParse(timestampProp.GetString(), out var parsed))
    {
        createdAt = parsed;
    }
    if (metadataProp.TryGetProperty("tools", out var toolsProp))
    {
        generatorTool = ExtractToolInfo(toolsProp);
    }
    // metadata.component describes the artifact the SBOM was generated for.
    if (metadataProp.TryGetProperty("component", out var primaryComponent))
    {
        primarySubject = ParseComponent(primaryComponent);
    }
}
// Extract all components
var subjects = new List<SbomSubject>();
int totalComponentCount = 0;
if (root.TryGetProperty("components", out var componentsProp) &&
componentsProp.ValueKind == JsonValueKind.Array)
{
foreach (var component in componentsProp.EnumerateArray())
{
totalComponentCount++;
var subject = ParseComponent(component);
if (subject is not null)
{
subjects.Add(subject);
}
}
}
// Add primary subject if it has a digest and isn't already in the list
if (primarySubject is not null &&
!subjects.Any(s => s.Digest.Equals(primarySubject.Digest, StringComparison.OrdinalIgnoreCase)))
{
subjects.Insert(0, primarySubject);
}
// Sort subjects for deterministic ordering
subjects = subjects
.OrderBy(s => s.Digest, StringComparer.Ordinal)
.ThenBy(s => s.Name ?? string.Empty, StringComparer.Ordinal)
.ToList();
return SbomParseResult.Success(
format: SbomFormat.CycloneDx,
subjects: subjects,
specVersion: specVersion,
serialNumber: serialNumber,
createdAt: createdAt,
generatorTool: generatorTool,
primarySubject: primarySubject,
totalComponentCount: totalComponentCount);
}
catch (JsonException ex)
{
return SbomParseResult.Failure($"JSON parsing error: {ex.Message}", SbomFormat.CycloneDx);
}
}
private static SbomSubject? ParseComponent(JsonElement component)
{
// Extract hashes
var hashes = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
if (component.TryGetProperty("hashes", out var hashesProp) &&
hashesProp.ValueKind == JsonValueKind.Array)
{
foreach (var hash in hashesProp.EnumerateArray())
{
if (hash.TryGetProperty("alg", out var algProp) &&
hash.TryGetProperty("content", out var contentProp))
{
var alg = algProp.GetString();
var content = contentProp.GetString();
if (!string.IsNullOrEmpty(alg) && !string.IsNullOrEmpty(content))
{
hashes[alg] = content;
}
}
}
}
// Determine primary digest (prefer SHA-256)
string? digest = null;
if (hashes.TryGetValue("SHA-256", out var sha256))
{
digest = NormalizeDigest("sha256:" + sha256);
}
else if (hashes.TryGetValue("SHA256", out sha256))
{
digest = NormalizeDigest("sha256:" + sha256);
}
else if (hashes.Count > 0)
{
// Use first available hash
var first = hashes.First();
digest = NormalizeDigest($"{first.Key.ToLowerInvariant().Replace("-", "")}:{first.Value}");
}
// If no digest, this component can't be indexed by digest
if (string.IsNullOrEmpty(digest))
{
return null;
}
// Extract other properties
string? name = null;
if (component.TryGetProperty("name", out var nameProp))
{
name = nameProp.GetString();
}
string? version = null;
if (component.TryGetProperty("version", out var versionProp))
{
version = versionProp.GetString();
}
string? purl = null;
if (component.TryGetProperty("purl", out var purlProp))
{
purl = purlProp.GetString();
}
string? type = null;
if (component.TryGetProperty("type", out var typeProp))
{
type = typeProp.GetString();
}
string? bomRef = null;
if (component.TryGetProperty("bom-ref", out var bomRefProp))
{
bomRef = bomRefProp.GetString();
}
return new SbomSubject
{
Digest = digest,
Name = name,
Version = version,
Purl = purl,
Type = type,
BomRef = bomRef,
Hashes = hashes
};
}
private static string? ExtractToolInfo(JsonElement tools)
{
    // CycloneDX 1.5+ nests tools under a "tools.components" object; CycloneDX 1.4
    // and earlier use a bare "tools" array. Check the ValueKind first so the
    // object-only TryGetProperty call cannot throw for the legacy array shape.
    if (tools.ValueKind == JsonValueKind.Object &&
        tools.TryGetProperty("components", out var components) &&
        components.ValueKind == JsonValueKind.Array)
    {
        return JoinToolNames(components);
    }
    if (tools.ValueKind == JsonValueKind.Array)
    {
        return JoinToolNames(tools);
    }
    return null;
}
private static string? JoinToolNames(JsonElement toolArray)
{
    var toolList = new List<string>();
    foreach (var tool in toolArray.EnumerateArray())
    {
        if (!tool.TryGetProperty("name", out var name))
        {
            continue;
        }
        var toolName = name.GetString();
        if (string.IsNullOrEmpty(toolName))
        {
            continue;
        }
        if (tool.TryGetProperty("version", out var version))
        {
            toolName += $"@{version.GetString()}";
        }
        toolList.Add(toolName);
    }
    return toolList.Count > 0 ? string.Join(", ", toolList) : null;
}
private static string NormalizeDigest(string digest)
{
return ArtifactIndex.NormalizeDigest(digest);
}
}
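
For context, a minimal sketch of how this parser might be driven. The result member names IsSuccess and Subjects are assumptions inferred from the Success/Failure factory calls above, not confirmed by this diff.

using StellaOps.AirGap.Importer.Reconciliation.Parsers;

// Hypothetical driver code; SbomFormat and CycloneDxParser come from the file above.
var parser = new CycloneDxParser();
const string path = "artifacts/app.cdx.json";

if (parser.DetectFormat(path) == SbomFormat.CycloneDx)
{
    var result = await parser.ParseAsync(path);
    if (result.IsSuccess)                        // assumed member name
    {
        foreach (var subject in result.Subjects) // assumed member name
        {
            Console.WriteLine($"{subject.Digest} {subject.Name}@{subject.Version}");
        }
    }
}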