Merge remote-tracking branch 'origin/main' into feature/docs-mdx-skeletons
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
SDK Publish & Sign / sdk-publish (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
sdk-generator-smoke / sdk-smoke (push) Has been cancelled
Airgap Sealed CI Smoke / sealed-smoke (push) Has been cancelled
Console CI / console-ci (push) Has been cancelled
Symbols Server CI / symbols-smoke (push) Has been cancelled
VEX Proof Bundles / verify-bundles (push) Has been cancelled

StellaOps Bot committed 2025-12-05 23:14:58 +02:00
7590 changed files with 22444 additions and 7465469 deletions

View File

@@ -0,0 +1,151 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stellaops.io/schemas/verification-policy.v1.json",
"title": "VerificationPolicy",
"description": "Attestation verification policy configuration for StellaOps",
"type": "object",
"required": ["policyId", "version", "predicateTypes", "signerRequirements"],
"properties": {
"policyId": {
"type": "string",
"description": "Unique policy identifier",
"pattern": "^[a-z0-9-]+$",
"examples": ["default-verification-policy", "strict-slsa-policy"]
},
"version": {
"type": "string",
"description": "Policy version (SemVer)",
"pattern": "^\\d+\\.\\d+\\.\\d+$",
"examples": ["1.0.0", "2.1.0"]
},
"description": {
"type": "string",
"description": "Human-readable policy description"
},
"tenantScope": {
"type": "string",
"description": "Tenant ID this policy applies to, or '*' for all tenants",
"default": "*"
},
"predicateTypes": {
"type": "array",
"description": "Allowed attestation predicate types",
"items": {
"type": "string"
},
"minItems": 1,
"examples": [
["stella.ops/sbom@v1", "stella.ops/vex@v1"]
]
},
"signerRequirements": {
"$ref": "#/$defs/SignerRequirements"
},
"validityWindow": {
"$ref": "#/$defs/ValidityWindow"
},
"metadata": {
"type": "object",
"description": "Free-form metadata",
"additionalProperties": true
}
},
"$defs": {
"SignerRequirements": {
"type": "object",
"description": "Requirements for attestation signers",
"properties": {
"minimumSignatures": {
"type": "integer",
"minimum": 1,
"default": 1,
"description": "Minimum number of valid signatures required"
},
"trustedKeyFingerprints": {
"type": "array",
"items": {
"type": "string",
"pattern": "^sha256:[a-f0-9]{64}$"
},
"description": "List of trusted signer key fingerprints (SHA-256)"
},
"trustedIssuers": {
"type": "array",
"items": {
"type": "string",
"format": "uri"
},
"description": "List of trusted issuer identities (OIDC issuers)"
},
"requireRekor": {
"type": "boolean",
"default": false,
"description": "Require Sigstore Rekor transparency log entry"
},
"algorithms": {
"type": "array",
"items": {
"type": "string",
"enum": ["ES256", "ES384", "ES512", "RS256", "RS384", "RS512", "EdDSA"]
},
"description": "Allowed signing algorithms",
"default": ["ES256", "RS256", "EdDSA"]
}
}
},
"ValidityWindow": {
"type": "object",
"description": "Time-based validity constraints",
"properties": {
"notBefore": {
"type": "string",
"format": "date-time",
"description": "Policy not valid before this time (ISO-8601)"
},
"notAfter": {
"type": "string",
"format": "date-time",
"description": "Policy not valid after this time (ISO-8601)"
},
"maxAttestationAge": {
"type": "integer",
"minimum": 0,
"description": "Maximum age of attestation in seconds (0 = no limit)"
}
}
}
},
"examples": [
{
"policyId": "default-verification-policy",
"version": "1.0.0",
"description": "Default verification policy for StellaOps attestations",
"tenantScope": "*",
"predicateTypes": [
"stella.ops/sbom@v1",
"stella.ops/vex@v1",
"stella.ops/vexDecision@v1",
"stella.ops/policy@v1",
"stella.ops/promotion@v1",
"stella.ops/evidence@v1",
"stella.ops/graph@v1",
"stella.ops/replay@v1",
"https://slsa.dev/provenance/v1",
"https://cyclonedx.org/bom",
"https://spdx.dev/Document",
"https://openvex.dev/ns"
],
"signerRequirements": {
"minimumSignatures": 1,
"trustedKeyFingerprints": [
"sha256:a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
],
"requireRekor": false,
"algorithms": ["ES256", "RS256", "EdDSA"]
},
"validityWindow": {
"maxAttestationAge": 86400
}
}
]
}
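For orientation, a minimal C# sketch of consuming a document that conforms to this schema. The record names and property shapes below are assumptions derived from the schema's required properties, not types from the StellaOps codebase; only the basic constraints the schema states are re-checked.

using System.Text.Json;
using System.Text.Json.Serialization;

// Deserialize a policy document and re-check the constraints the schema enforces.
var policy = JsonSerializer.Deserialize<VerificationPolicy>(File.ReadAllText("policy.json"))
             ?? throw new InvalidOperationException("Policy document is empty.");
if (policy.PredicateTypes.Length == 0 || policy.SignerRequirements.MinimumSignatures < 1)
{
    throw new InvalidOperationException("Policy violates verification-policy.v1 constraints.");
}

// Hypothetical DTOs mirroring the required properties of verification-policy.v1.json.
public sealed record VerificationPolicy(
    [property: JsonPropertyName("policyId")] string PolicyId,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("predicateTypes")] string[] PredicateTypes,
    [property: JsonPropertyName("signerRequirements")] SignerRequirements SignerRequirements);

public sealed record SignerRequirements(
    [property: JsonPropertyName("minimumSignatures")] int MinimumSignatures = 1,
    [property: JsonPropertyName("requireRekor")] bool RequireRekor = false);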

View File

@@ -3,6 +3,7 @@ using System.Diagnostics;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Cryptography;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Observability;
using StellaOps.Findings.Ledger.Options;
@@ -14,6 +15,7 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
{
private readonly LedgerAnchorQueue _queue;
private readonly IMerkleAnchorRepository _repository;
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
private readonly LedgerServiceOptions.MerkleOptions _options;
private readonly ILogger<LedgerMerkleAnchorWorker> _logger;
@@ -22,12 +24,14 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
public LedgerMerkleAnchorWorker(
LedgerAnchorQueue queue,
IMerkleAnchorRepository repository,
ICryptoHash cryptoHash,
IOptions<LedgerServiceOptions> options,
TimeProvider timeProvider,
ILogger<LedgerMerkleAnchorWorker> logger)
{
_queue = queue ?? throw new ArgumentNullException(nameof(queue));
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value.Merkle ?? throw new ArgumentNullException(nameof(options));
@@ -89,7 +93,7 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
.ThenBy(e => e.RecordedAt)
.ToList();
var rootHash = MerkleTreeBuilder.ComputeRoot(orderedEvents.Select(e => e.MerkleLeafHash).ToArray());
var rootHash = MerkleTreeBuilder.ComputeRoot(_cryptoHash, orderedEvents.Select(e => e.MerkleLeafHash).ToArray());
var anchorId = Guid.NewGuid();
var windowStart = orderedEvents.First().RecordedAt;
var windowEnd = orderedEvents.Last().RecordedAt;

View File

@@ -1,12 +1,14 @@
using System.Security.Cryptography;
using System.Text;
using StellaOps.Cryptography;
namespace StellaOps.Findings.Ledger.Infrastructure.Merkle;
internal static class MerkleTreeBuilder
{
public static string ComputeRoot(IReadOnlyList<string> leafHashes)
public static string ComputeRoot(ICryptoHash cryptoHash, IReadOnlyList<string> leafHashes)
{
ArgumentNullException.ThrowIfNull(cryptoHash);
if (leafHashes.Count == 0)
{
throw new ArgumentException("At least one leaf hash is required to compute a Merkle root.", nameof(leafHashes));
@@ -18,13 +20,13 @@ internal static class MerkleTreeBuilder
while (currentLevel.Length > 1)
{
currentLevel = ComputeNextLevel(currentLevel);
currentLevel = ComputeNextLevel(cryptoHash, currentLevel);
}
return currentLevel[0];
}
private static string[] ComputeNextLevel(IReadOnlyList<string> level)
private static string[] ComputeNextLevel(ICryptoHash cryptoHash, IReadOnlyList<string> level)
{
var next = new string[(level.Count + 1) / 2];
var index = 0;
@@ -33,16 +35,15 @@ internal static class MerkleTreeBuilder
{
var left = level[i];
var right = i + 1 < level.Count ? level[i + 1] : level[i];
next[index++] = HashPair(left, right);
next[index++] = HashPair(cryptoHash, left, right);
}
return next;
}
private static string HashPair(string left, string right)
private static string HashPair(ICryptoHash cryptoHash, string left, string right)
{
var bytes = Encoding.UTF8.GetBytes(left + right);
var hashBytes = SHA256.HashData(bytes);
return Convert.ToHexString(hashBytes).ToLowerInvariant();
return cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Merkle);
}
}
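As a rough usage sketch of the new signature: the Sha256CryptoHash stand-in below is an assumption for illustration only (it presumes ICryptoHash exposes just the member used above and that HashPurpose.Merkle maps to SHA-256); the production implementation lives in StellaOps.Cryptography.

// Leaf hashes would normally be the ledger events' MerkleLeafHash values (hex strings).
var leaves = new[] { "leaf-hash-1", "leaf-hash-2", "leaf-hash-3" };
var root = MerkleTreeBuilder.ComputeRoot(new Sha256CryptoHash(), leaves);

// Test stand-in only; the real ICryptoHash abstraction may differ.
internal sealed class Sha256CryptoHash : ICryptoHash
{
    public string ComputeHashHexForPurpose(byte[] data, HashPurpose purpose)
        => Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(data)).ToLowerInvariant();
}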

View File

@@ -30,4 +30,8 @@
<FrameworkReference Include="Microsoft.AspNetCore.App" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,4 +1,5 @@
using StellaOps.Gateway.WebService.Middleware;
using StellaOps.Gateway.WebService.OpenApi;
namespace StellaOps.Gateway.WebService;
@@ -25,4 +26,15 @@ public static class ApplicationBuilderExtensions
return app;
}
/// <summary>
/// Maps OpenAPI endpoints to the application.
/// Should be called before UseGatewayRouter so OpenAPI requests are handled first.
/// </summary>
/// <param name="endpoints">The endpoint route builder.</param>
/// <returns>The endpoint route builder for chaining.</returns>
public static IEndpointRouteBuilder MapGatewayOpenApi(this IEndpointRouteBuilder endpoints)
{
return endpoints.MapGatewayOpenApiEndpoints();
}
}
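The ordering requirement in the summary above can be illustrated with a minimal Program.cs sketch. AddGatewayServices is a hypothetical name for the registration extension (the actual method in ServiceCollectionExtensions also wires the OpenAPI options, generator, and cache), and the exact UseGatewayRouter signature is assumed from the comment above.

var builder = WebApplication.CreateBuilder(args);
builder.Services.AddGatewayServices(builder.Configuration); // hypothetical registration call

var app = builder.Build();
app.MapGatewayOpenApi();   // OpenAPI endpoints are mapped first...
app.UseGatewayRouter();    // ...so the catch-all router does not swallow /openapi.* requests.
app.Run();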

View File

@@ -1,4 +1,5 @@
using Microsoft.Extensions.Logging;
using StellaOps.Gateway.WebService.OpenApi;
using StellaOps.Router.Common.Abstractions;
using StellaOps.Router.Common.Enums;
using StellaOps.Router.Common.Models;
@@ -14,17 +15,20 @@ internal sealed class ConnectionManager : IHostedService
private readonly InMemoryTransportServer _transportServer;
private readonly InMemoryConnectionRegistry _connectionRegistry;
private readonly IGlobalRoutingState _routingState;
private readonly IGatewayOpenApiDocumentCache? _openApiCache;
private readonly ILogger<ConnectionManager> _logger;
public ConnectionManager(
InMemoryTransportServer transportServer,
InMemoryConnectionRegistry connectionRegistry,
IGlobalRoutingState routingState,
ILogger<ConnectionManager> logger)
ILogger<ConnectionManager> logger,
IGatewayOpenApiDocumentCache? openApiCache = null)
{
_transportServer = transportServer;
_connectionRegistry = connectionRegistry;
_routingState = routingState;
_openApiCache = openApiCache;
_logger = logger;
}
@@ -55,11 +59,12 @@ internal sealed class ConnectionManager : IHostedService
private Task HandleHelloReceivedAsync(ConnectionState connectionState, HelloPayload payload)
{
_logger.LogInformation(
"Connection registered: {ConnectionId} from {ServiceName}/{Version} with {EndpointCount} endpoints",
"Connection registered: {ConnectionId} from {ServiceName}/{Version} with {EndpointCount} endpoints, {SchemaCount} schemas",
connectionState.ConnectionId,
connectionState.Instance.ServiceName,
connectionState.Instance.Version,
connectionState.Endpoints.Count);
connectionState.Endpoints.Count,
connectionState.Schemas.Count);
// Add the connection to the routing state
_routingState.AddConnection(connectionState);
@@ -67,6 +72,9 @@ internal sealed class ConnectionManager : IHostedService
// Start listening to this connection for frames
_transportServer.StartListeningToConnection(connectionState.ConnectionId);
// Invalidate OpenAPI cache when connections change
_openApiCache?.Invalidate();
return Task.CompletedTask;
}
@@ -94,6 +102,9 @@ internal sealed class ConnectionManager : IHostedService
// Remove from routing state
_routingState.RemoveConnection(connectionId);
// Invalidate OpenAPI cache when connections change
_openApiCache?.Invalidate();
return Task.CompletedTask;
}
}

View File

@@ -0,0 +1,106 @@
using System.Text.Json.Nodes;
using StellaOps.Router.Common.Models;
namespace StellaOps.Gateway.WebService.OpenApi;
/// <summary>
/// Maps claim requirements to OpenAPI security schemes.
/// </summary>
internal static class ClaimSecurityMapper
{
/// <summary>
/// Generates security schemes from claim requirements.
/// </summary>
/// <param name="endpoints">All endpoint descriptors.</param>
/// <param name="tokenUrl">The OAuth2 token URL.</param>
/// <returns>Security schemes JSON object.</returns>
public static JsonObject GenerateSecuritySchemes(
IEnumerable<EndpointDescriptor> endpoints,
string tokenUrl)
{
var schemes = new JsonObject();
// Always add BearerAuth scheme
schemes["BearerAuth"] = new JsonObject
{
["type"] = "http",
["scheme"] = "bearer",
["bearerFormat"] = "JWT",
["description"] = "JWT Bearer token authentication"
};
// Collect all unique scopes from claims
var scopes = new Dictionary<string, string>();
foreach (var endpoint in endpoints)
{
foreach (var claim in endpoint.RequiringClaims)
{
var scope = claim.Type;
if (!scopes.ContainsKey(scope))
{
scopes[scope] = $"Access scope: {scope}";
}
}
}
// Add OAuth2 scheme if there are any scopes
if (scopes.Count > 0)
{
var scopesObject = new JsonObject();
foreach (var (scope, description) in scopes)
{
scopesObject[scope] = description;
}
schemes["OAuth2"] = new JsonObject
{
["type"] = "oauth2",
["flows"] = new JsonObject
{
["clientCredentials"] = new JsonObject
{
["tokenUrl"] = tokenUrl,
["scopes"] = scopesObject
}
}
};
}
return schemes;
}
/// <summary>
/// Generates security requirement for an endpoint.
/// </summary>
/// <param name="endpoint">The endpoint descriptor.</param>
/// <returns>Security requirement JSON array.</returns>
public static JsonArray GenerateSecurityRequirement(EndpointDescriptor endpoint)
{
var requirements = new JsonArray();
if (endpoint.RequiringClaims.Count == 0)
{
return requirements;
}
var requirement = new JsonObject();
// Always require BearerAuth
requirement["BearerAuth"] = new JsonArray();
// Add OAuth2 scopes
var scopes = new JsonArray();
foreach (var claim in endpoint.RequiringClaims)
{
scopes.Add(claim.Type);
}
if (scopes.Count > 0)
{
requirement["OAuth2"] = scopes;
}
requirements.Add(requirement);
return requirements;
}
}
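A small sketch of what the mapper produces for a single protected endpoint; the service name and claim type are illustrative values, not identifiers from the codebase.

// An endpoint that requires the "findings.read" claim.
var endpoint = new EndpointDescriptor
{
    ServiceName = "findings",
    Version = "1.0.0",
    Method = "GET",
    Path = "/api/findings",
    RequiringClaims = [new ClaimRequirement { Type = "findings.read", Value = null }]
};

var schemes = ClaimSecurityMapper.GenerateSecuritySchemes([endpoint], tokenUrl: "/auth/token");
var security = ClaimSecurityMapper.GenerateSecurityRequirement(endpoint);
// schemes contains BearerAuth plus an OAuth2 clientCredentials flow whose scopes include
// "findings.read"; security is [ { "BearerAuth": [], "OAuth2": ["findings.read"] } ].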

View File

@@ -0,0 +1,69 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Options;
namespace StellaOps.Gateway.WebService.OpenApi;
/// <summary>
/// Caches the generated OpenAPI document with TTL-based expiration.
/// </summary>
internal sealed class GatewayOpenApiDocumentCache : IGatewayOpenApiDocumentCache
{
private readonly IOpenApiDocumentGenerator _generator;
private readonly OpenApiAggregationOptions _options;
private readonly object _lock = new();
private string? _cachedDocument;
private string? _cachedETag;
private DateTime _generatedAt;
private bool _invalidated = true;
public GatewayOpenApiDocumentCache(
IOpenApiDocumentGenerator generator,
IOptions<OpenApiAggregationOptions> options)
{
_generator = generator;
_options = options.Value;
}
/// <inheritdoc />
public (string DocumentJson, string ETag, DateTime GeneratedAt) GetDocument()
{
lock (_lock)
{
var now = DateTime.UtcNow;
var ttl = TimeSpan.FromSeconds(_options.CacheTtlSeconds);
// Check if we need to regenerate
if (_invalidated || _cachedDocument is null || now - _generatedAt > ttl)
{
Regenerate();
}
return (_cachedDocument!, _cachedETag!, _generatedAt);
}
}
/// <inheritdoc />
public void Invalidate()
{
lock (_lock)
{
_invalidated = true;
}
}
private void Regenerate()
{
_cachedDocument = _generator.GenerateDocument();
_cachedETag = ComputeETag(_cachedDocument);
_generatedAt = DateTime.UtcNow;
_invalidated = false;
}
private static string ComputeETag(string content)
{
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content));
return $"\"{Convert.ToHexString(hash)[..16]}\"";
}
}

View File

@@ -0,0 +1,18 @@
namespace StellaOps.Gateway.WebService.OpenApi;
/// <summary>
/// Caches the generated OpenAPI document with TTL-based expiration.
/// </summary>
public interface IGatewayOpenApiDocumentCache
{
/// <summary>
/// Gets the cached document or regenerates if expired.
/// </summary>
/// <returns>A tuple containing the document JSON, ETag, and generation timestamp.</returns>
(string DocumentJson, string ETag, DateTime GeneratedAt) GetDocument();
/// <summary>
/// Invalidates the cache, forcing regeneration on next access.
/// </summary>
void Invalidate();
}

View File

@@ -0,0 +1,13 @@
namespace StellaOps.Gateway.WebService.OpenApi;
/// <summary>
/// Generates OpenAPI documents from aggregated microservice schemas.
/// </summary>
public interface IOpenApiDocumentGenerator
{
/// <summary>
/// Generates the OpenAPI 3.1.0 document as JSON.
/// </summary>
/// <returns>The OpenAPI document as a JSON string.</returns>
string GenerateDocument();
}

View File

@@ -0,0 +1,62 @@
namespace StellaOps.Gateway.WebService.OpenApi;
/// <summary>
/// Configuration options for OpenAPI document aggregation.
/// </summary>
public sealed class OpenApiAggregationOptions
{
/// <summary>
/// The configuration section name.
/// </summary>
public const string SectionName = "OpenApi";
/// <summary>
/// Gets or sets the API title.
/// </summary>
public string Title { get; set; } = "StellaOps Gateway API";
/// <summary>
/// Gets or sets the API description.
/// </summary>
public string Description { get; set; } = "Unified API aggregating all connected microservices.";
/// <summary>
/// Gets or sets the API version.
/// </summary>
public string Version { get; set; } = "1.0.0";
/// <summary>
/// Gets or sets the server URL.
/// </summary>
public string ServerUrl { get; set; } = "/";
/// <summary>
/// Gets or sets the cache TTL in seconds.
/// </summary>
public int CacheTtlSeconds { get; set; } = 60;
/// <summary>
/// Gets or sets whether OpenAPI aggregation is enabled.
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// Gets or sets the license name.
/// </summary>
public string LicenseName { get; set; } = "AGPL-3.0-or-later";
/// <summary>
/// Gets or sets the contact name.
/// </summary>
public string? ContactName { get; set; }
/// <summary>
/// Gets or sets the contact email.
/// </summary>
public string? ContactEmail { get; set; }
/// <summary>
/// Gets or sets the OAuth2 token URL for security schemes.
/// </summary>
public string TokenUrl { get; set; } = "/auth/token";
}
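A short sketch of how the "OpenApi" configuration section binds onto these options. The JSON keys are assumptions derived directly from the property names above; properties that are omitted keep their defaults.

using System.Text;
using Microsoft.Extensions.Configuration;

// Illustrative configuration fragment.
const string json = """
{
  "OpenApi": {
    "Title": "StellaOps Gateway API",
    "CacheTtlSeconds": 120,
    "TokenUrl": "/auth/token"
  }
}
""";

var configuration = new ConfigurationBuilder()
    .AddJsonStream(new MemoryStream(Encoding.UTF8.GetBytes(json)))
    .Build();

var options = configuration.GetSection(OpenApiAggregationOptions.SectionName)
    .Get<OpenApiAggregationOptions>() ?? new OpenApiAggregationOptions();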

View File

@@ -0,0 +1,285 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Options;
using StellaOps.Router.Common.Abstractions;
using StellaOps.Router.Common.Models;
namespace StellaOps.Gateway.WebService.OpenApi;
/// <summary>
/// Generates OpenAPI 3.1.0 documents from aggregated microservice schemas.
/// </summary>
internal sealed class OpenApiDocumentGenerator : IOpenApiDocumentGenerator
{
private readonly IGlobalRoutingState _routingState;
private readonly OpenApiAggregationOptions _options;
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = true
};
public OpenApiDocumentGenerator(
IGlobalRoutingState routingState,
IOptions<OpenApiAggregationOptions> options)
{
_routingState = routingState;
_options = options.Value;
}
/// <inheritdoc />
public string GenerateDocument()
{
var connections = _routingState.GetAllConnections();
var doc = new JsonObject
{
["openapi"] = "3.1.0",
["info"] = GenerateInfo(),
["servers"] = GenerateServers(),
["paths"] = GeneratePaths(connections),
["components"] = GenerateComponents(connections),
["tags"] = GenerateTags(connections)
};
return doc.ToJsonString(JsonOptions);
}
private JsonObject GenerateInfo()
{
var info = new JsonObject
{
["title"] = _options.Title,
["version"] = _options.Version,
["description"] = _options.Description,
["license"] = new JsonObject
{
["name"] = _options.LicenseName
}
};
if (_options.ContactName is not null || _options.ContactEmail is not null)
{
var contact = new JsonObject();
if (_options.ContactName is not null)
contact["name"] = _options.ContactName;
if (_options.ContactEmail is not null)
contact["email"] = _options.ContactEmail;
info["contact"] = contact;
}
return info;
}
private JsonArray GenerateServers()
{
return new JsonArray
{
new JsonObject
{
["url"] = _options.ServerUrl
}
};
}
private JsonObject GeneratePaths(IReadOnlyList<ConnectionState> connections)
{
var paths = new JsonObject();
// Group endpoints by path
var pathGroups = new Dictionary<string, List<(ConnectionState Conn, EndpointDescriptor Endpoint)>>();
foreach (var conn in connections)
{
foreach (var endpoint in conn.Endpoints.Values)
{
if (!pathGroups.TryGetValue(endpoint.Path, out var list))
{
list = [];
pathGroups[endpoint.Path] = list;
}
list.Add((conn, endpoint));
}
}
// Generate path items
foreach (var (path, endpoints) in pathGroups.OrderBy(p => p.Key))
{
var pathItem = new JsonObject();
foreach (var (conn, endpoint) in endpoints)
{
var operation = GenerateOperation(conn, endpoint);
var method = endpoint.Method.ToLowerInvariant();
pathItem[method] = operation;
}
paths[path] = pathItem;
}
return paths;
}
private JsonObject GenerateOperation(ConnectionState conn, EndpointDescriptor endpoint)
{
var operation = new JsonObject
{
["operationId"] = $"{conn.Instance.ServiceName}_{endpoint.Path.Replace("/", "_").Trim('_')}_{endpoint.Method}",
["tags"] = new JsonArray { conn.Instance.ServiceName }
};
// Add documentation from SchemaInfo
if (endpoint.SchemaInfo is not null)
{
if (endpoint.SchemaInfo.Summary is not null)
operation["summary"] = endpoint.SchemaInfo.Summary;
if (endpoint.SchemaInfo.Description is not null)
operation["description"] = endpoint.SchemaInfo.Description;
if (endpoint.SchemaInfo.Deprecated)
operation["deprecated"] = true;
// Override tags if specified
if (endpoint.SchemaInfo.Tags.Count > 0)
{
var tags = new JsonArray();
foreach (var tag in endpoint.SchemaInfo.Tags)
{
tags.Add(tag);
}
operation["tags"] = tags;
}
}
// Add security requirements
var security = ClaimSecurityMapper.GenerateSecurityRequirement(endpoint);
if (security.Count > 0)
{
operation["security"] = security;
}
// Add request body if schema exists
if (endpoint.SchemaInfo?.RequestSchemaId is not null)
{
var schemaRef = $"#/components/schemas/{conn.Instance.ServiceName}_{endpoint.SchemaInfo.RequestSchemaId}";
operation["requestBody"] = new JsonObject
{
["required"] = true,
["content"] = new JsonObject
{
["application/json"] = new JsonObject
{
["schema"] = new JsonObject
{
["$ref"] = schemaRef
}
}
}
};
}
// Add responses
var responses = new JsonObject();
// Success response
var successResponse = new JsonObject
{
["description"] = "Success"
};
if (endpoint.SchemaInfo?.ResponseSchemaId is not null)
{
var schemaRef = $"#/components/schemas/{conn.Instance.ServiceName}_{endpoint.SchemaInfo.ResponseSchemaId}";
successResponse["content"] = new JsonObject
{
["application/json"] = new JsonObject
{
["schema"] = new JsonObject
{
["$ref"] = schemaRef
}
}
};
}
responses["200"] = successResponse;
// Error responses
responses["400"] = new JsonObject { ["description"] = "Bad Request" };
responses["401"] = new JsonObject { ["description"] = "Unauthorized" };
responses["404"] = new JsonObject { ["description"] = "Not Found" };
responses["422"] = new JsonObject { ["description"] = "Validation Error" };
responses["500"] = new JsonObject { ["description"] = "Internal Server Error" };
operation["responses"] = responses;
return operation;
}
private JsonObject GenerateComponents(IReadOnlyList<ConnectionState> connections)
{
var components = new JsonObject();
// Generate schemas with service prefix
var schemas = new JsonObject();
foreach (var conn in connections)
{
foreach (var (schemaId, schemaDef) in conn.Schemas)
{
var prefixedId = $"{conn.Instance.ServiceName}_{schemaId}";
try
{
var schemaNode = JsonNode.Parse(schemaDef.SchemaJson);
if (schemaNode is not null)
{
schemas[prefixedId] = schemaNode;
}
}
catch (JsonException)
{
// Skip invalid schemas
}
}
}
if (schemas.Count > 0)
{
components["schemas"] = schemas;
}
// Generate security schemes
var allEndpoints = connections.SelectMany(c => c.Endpoints.Values);
var securitySchemes = ClaimSecurityMapper.GenerateSecuritySchemes(allEndpoints, _options.TokenUrl);
if (securitySchemes.Count > 0)
{
components["securitySchemes"] = securitySchemes;
}
return components;
}
private JsonArray GenerateTags(IReadOnlyList<ConnectionState> connections)
{
var tags = new JsonArray();
var seen = new HashSet<string>();
foreach (var conn in connections)
{
var serviceName = conn.Instance.ServiceName;
if (seen.Add(serviceName))
{
var tag = new JsonObject
{
["name"] = serviceName,
["description"] = $"{serviceName} microservice (v{conn.Instance.Version})"
};
if (conn.OpenApiInfo?.Description is not null)
{
tag["description"] = conn.OpenApiInfo.Description;
}
tags.Add(tag);
}
}
return tags;
}
}

View File

@@ -0,0 +1,124 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.AspNetCore.Mvc;
using YamlDotNet.Serialization;
using YamlDotNet.Serialization.NamingConventions;
namespace StellaOps.Gateway.WebService.OpenApi;
/// <summary>
/// Endpoints for serving OpenAPI documentation.
/// </summary>
public static class OpenApiEndpoints
{
private static readonly ISerializer YamlSerializer = new SerializerBuilder()
.WithNamingConvention(CamelCaseNamingConvention.Instance)
.Build();
/// <summary>
/// Maps OpenAPI endpoints to the application.
/// </summary>
public static IEndpointRouteBuilder MapGatewayOpenApiEndpoints(this IEndpointRouteBuilder endpoints)
{
endpoints.MapGet("/.well-known/openapi", GetOpenApiDiscovery)
.ExcludeFromDescription();
endpoints.MapGet("/openapi.json", GetOpenApiJson)
.ExcludeFromDescription();
endpoints.MapGet("/openapi.yaml", GetOpenApiYaml)
.ExcludeFromDescription();
return endpoints;
}
private static IResult GetOpenApiDiscovery(
[FromServices] IGatewayOpenApiDocumentCache cache,
HttpContext context)
{
var (_, etag, generatedAt) = cache.GetDocument();
var discovery = new
{
openapi_json = "/openapi.json",
openapi_yaml = "/openapi.yaml",
etag,
generated_at = generatedAt.ToString("O")
};
context.Response.Headers.CacheControl = "public, max-age=60";
return Results.Ok(discovery);
}
private static IResult GetOpenApiJson(
[FromServices] IGatewayOpenApiDocumentCache cache,
HttpContext context)
{
var (documentJson, etag, _) = cache.GetDocument();
// Check If-None-Match header
if (context.Request.Headers.TryGetValue("If-None-Match", out var ifNoneMatch))
{
if (ifNoneMatch == etag)
{
context.Response.Headers.ETag = etag;
context.Response.Headers.CacheControl = "public, max-age=60";
return Results.StatusCode(304);
}
}
context.Response.Headers.ETag = etag;
context.Response.Headers.CacheControl = "public, max-age=60";
return Results.Content(documentJson, "application/json; charset=utf-8");
}
private static IResult GetOpenApiYaml(
[FromServices] IGatewayOpenApiDocumentCache cache,
HttpContext context)
{
var (documentJson, etag, _) = cache.GetDocument();
// Check If-None-Match header
if (context.Request.Headers.TryGetValue("If-None-Match", out var ifNoneMatch))
{
if (ifNoneMatch == etag)
{
context.Response.Headers.ETag = etag;
context.Response.Headers.CacheControl = "public, max-age=60";
return Results.StatusCode(304);
}
}
// Convert JSON to YAML
var jsonNode = JsonNode.Parse(documentJson);
var yamlContent = ConvertToYaml(jsonNode);
context.Response.Headers.ETag = etag;
context.Response.Headers.CacheControl = "public, max-age=60";
return Results.Content(yamlContent, "application/yaml; charset=utf-8");
}
private static string ConvertToYaml(JsonNode? node)
{
if (node is null)
return string.Empty;
var obj = ConvertJsonNodeToObject(node);
return YamlSerializer.Serialize(obj);
}
private static object? ConvertJsonNodeToObject(JsonNode? node)
{
return node switch
{
null => null,
JsonObject obj => obj.ToDictionary(
kvp => kvp.Key,
kvp => ConvertJsonNodeToObject(kvp.Value)),
JsonArray arr => arr.Select(ConvertJsonNodeToObject).ToList(),
JsonValue val => val.GetValue<object>(),
_ => null
};
}
}
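Client-side, the ETag handling above allows cheap revalidation of a cached document. A rough HttpClient sketch, assuming a placeholder gateway address:

using System.Net;

var client = new HttpClient { BaseAddress = new Uri("https://gateway.local/") }; // assumed address

// First fetch: 200 with the document body and an ETag.
var first = await client.GetAsync("openapi.json");
var etag = first.Headers.ETag;
var document = await first.Content.ReadAsStringAsync();

// Revalidation: send If-None-Match; an unchanged document comes back as 304 with no body.
var request = new HttpRequestMessage(HttpMethod.Get, "openapi.json");
if (etag is not null)
{
    request.Headers.IfNoneMatch.Add(etag);
}
var second = await client.SendAsync(request);
var upToDate = second.StatusCode == HttpStatusCode.NotModified;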

View File

@@ -1,3 +1,4 @@
using StellaOps.Gateway.WebService.OpenApi;
using StellaOps.Router.Common.Abstractions;
using StellaOps.Router.Transport.InMemory;
@@ -41,6 +42,12 @@ public static class ServiceCollectionExtensions
// Register health monitor as hosted service
services.AddHostedService<HealthMonitorService>();
// Register OpenAPI aggregation services
services.Configure<OpenApiAggregationOptions>(
configuration.GetSection(OpenApiAggregationOptions.SectionName));
services.AddSingleton<IOpenApiDocumentGenerator, OpenApiDocumentGenerator>();
services.AddSingleton<IGatewayOpenApiDocumentCache, GatewayOpenApiDocumentCache>();
return services;
}

View File

@@ -6,6 +6,9 @@
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="YamlDotNet" Version="16.2.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\__Libraries\StellaOps.Router.Common\StellaOps.Router.Common.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Router.Config\StellaOps.Router.Config.csproj" />

View File

@@ -0,0 +1,270 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Gateway.WebService.Authorization;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests;
/// <summary>
/// Unit tests for <see cref="AuthorityClaimsRefreshService"/>.
/// </summary>
public sealed class AuthorityClaimsRefreshServiceTests
{
private readonly Mock<IAuthorityClaimsProvider> _claimsProviderMock;
private readonly Mock<IEffectiveClaimsStore> _claimsStoreMock;
private readonly AuthorityConnectionOptions _options;
public AuthorityClaimsRefreshServiceTests()
{
_claimsProviderMock = new Mock<IAuthorityClaimsProvider>();
_claimsStoreMock = new Mock<IEffectiveClaimsStore>();
_options = new AuthorityConnectionOptions
{
AuthorityUrl = "http://authority.local",
Enabled = true,
RefreshInterval = TimeSpan.FromMilliseconds(100),
WaitForAuthorityOnStartup = false,
StartupTimeout = TimeSpan.FromSeconds(1)
};
_claimsProviderMock.Setup(p => p.GetOverridesAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync(new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>());
}
private AuthorityClaimsRefreshService CreateService()
{
return new AuthorityClaimsRefreshService(
_claimsProviderMock.Object,
_claimsStoreMock.Object,
Options.Create(_options),
NullLogger<AuthorityClaimsRefreshService>.Instance);
}
#region ExecuteAsync Tests - Disabled
[Fact]
public async Task ExecuteAsync_WhenDisabled_DoesNotFetchClaims()
{
// Arrange
_options.Enabled = false;
var service = CreateService();
using var cts = new CancellationTokenSource();
// Act
await service.StartAsync(cts.Token);
await Task.Delay(50);
await service.StopAsync(cts.Token);
// Assert
_claimsProviderMock.Verify(
p => p.GetOverridesAsync(It.IsAny<CancellationToken>()),
Times.Never);
}
[Fact]
public async Task ExecuteAsync_WhenNoAuthorityUrl_DoesNotFetchClaims()
{
// Arrange
_options.AuthorityUrl = string.Empty;
var service = CreateService();
using var cts = new CancellationTokenSource();
// Act
await service.StartAsync(cts.Token);
await Task.Delay(50);
await service.StopAsync(cts.Token);
// Assert
_claimsProviderMock.Verify(
p => p.GetOverridesAsync(It.IsAny<CancellationToken>()),
Times.Never);
}
#endregion
#region ExecuteAsync Tests - Enabled
[Fact]
public async Task ExecuteAsync_WhenEnabled_FetchesClaims()
{
// Arrange
var service = CreateService();
using var cts = new CancellationTokenSource();
// Act
await service.StartAsync(cts.Token);
await Task.Delay(50);
await cts.CancelAsync();
await service.StopAsync(CancellationToken.None);
// Assert
_claimsProviderMock.Verify(
p => p.GetOverridesAsync(It.IsAny<CancellationToken>()),
Times.AtLeastOnce);
}
[Fact]
public async Task ExecuteAsync_UpdatesStoreWithOverrides()
{
// Arrange
var key = EndpointKey.Create("service", "GET", "/api/test");
var overrides = new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>
{
[key] = [new ClaimRequirement { Type = "role", Value = "admin" }]
};
_claimsProviderMock.Setup(p => p.GetOverridesAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync(overrides);
var service = CreateService();
using var cts = new CancellationTokenSource();
// Act
await service.StartAsync(cts.Token);
await Task.Delay(50);
await cts.CancelAsync();
await service.StopAsync(CancellationToken.None);
// Assert
_claimsStoreMock.Verify(
s => s.UpdateFromAuthority(It.Is<IReadOnlyDictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>>(
d => d.ContainsKey(key))),
Times.AtLeastOnce);
}
#endregion
#region ExecuteAsync Tests - Wait for Authority
[Fact]
public async Task ExecuteAsync_WaitForAuthority_FetchesOnStartup()
{
// Arrange
_options.WaitForAuthorityOnStartup = true;
_options.StartupTimeout = TimeSpan.FromMilliseconds(500);
// Authority is immediately available
_claimsProviderMock.Setup(p => p.IsAvailable).Returns(true);
var fetchCalled = false;
_claimsProviderMock.Setup(p => p.GetOverridesAsync(It.IsAny<CancellationToken>()))
.Callback(() => fetchCalled = true)
.ReturnsAsync(new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>());
var service = CreateService();
using var cts = new CancellationTokenSource();
// Act
await service.StartAsync(cts.Token);
await Task.Delay(100);
await cts.CancelAsync();
await service.StopAsync(CancellationToken.None);
// Assert - fetch was called during startup
fetchCalled.Should().BeTrue();
}
[Fact]
public async Task ExecuteAsync_WaitForAuthority_StopsAfterTimeout()
{
// Arrange
_options.WaitForAuthorityOnStartup = true;
_options.StartupTimeout = TimeSpan.FromMilliseconds(100);
_claimsProviderMock.Setup(p => p.IsAvailable).Returns(false);
var service = CreateService();
using var cts = new CancellationTokenSource();
// Act - should not block forever
var startTask = service.StartAsync(cts.Token);
await Task.Delay(300);
await cts.CancelAsync();
await service.StopAsync(CancellationToken.None);
// Assert - should complete even if Authority never becomes available
startTask.IsCompleted.Should().BeTrue();
}
#endregion
#region Push Notification Tests
[Fact]
public async Task ExecuteAsync_WithPushNotifications_SubscribesToEvent()
{
// Arrange
_options.UseAuthorityPushNotifications = true;
var service = CreateService();
using var cts = new CancellationTokenSource();
// Act
await service.StartAsync(cts.Token);
await Task.Delay(50);
await cts.CancelAsync();
await service.StopAsync(CancellationToken.None);
// Assert - verify the service subscribed to the OverridesChanged event
_claimsProviderMock.VerifyAdd(
p => p.OverridesChanged += It.IsAny<EventHandler<ClaimsOverrideChangedEventArgs>>(),
Times.Once);
}
[Fact]
public async Task Dispose_WithPushNotifications_UnsubscribesFromEvent()
{
// Arrange
_options.UseAuthorityPushNotifications = true;
var service = CreateService();
using var cts = new CancellationTokenSource();
await service.StartAsync(cts.Token);
await Task.Delay(50);
// Act
await cts.CancelAsync();
service.Dispose();
// Assert
_claimsProviderMock.VerifyRemove(
p => p.OverridesChanged -= It.IsAny<EventHandler<ClaimsOverrideChangedEventArgs>>(),
Times.Once);
}
#endregion
#region Error Handling Tests
[Fact]
public async Task ExecuteAsync_ProviderThrows_ContinuesRefreshLoop()
{
// Arrange
var callCount = 0;
_claimsProviderMock.Setup(p => p.GetOverridesAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync(() =>
{
callCount++;
if (callCount == 1)
{
throw new HttpRequestException("Test error");
}
return new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>();
});
var service = CreateService();
using var cts = new CancellationTokenSource();
// Act
await service.StartAsync(cts.Token);
await Task.Delay(250); // Wait for at least 2 refresh cycles
await cts.CancelAsync();
await service.StopAsync(CancellationToken.None);
// Assert - should have continued after error
callCount.Should().BeGreaterThan(1);
}
#endregion
}

View File

@@ -0,0 +1,336 @@
using System.Security.Claims;
using FluentAssertions;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Gateway.WebService.Authorization;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests;
/// <summary>
/// Unit tests for <see cref="AuthorizationMiddleware"/>.
/// </summary>
public sealed class AuthorizationMiddlewareTests
{
private readonly Mock<IEffectiveClaimsStore> _claimsStoreMock;
private readonly Mock<RequestDelegate> _nextMock;
private bool _nextCalled;
public AuthorizationMiddlewareTests()
{
_claimsStoreMock = new Mock<IEffectiveClaimsStore>();
_nextMock = new Mock<RequestDelegate>();
_nextMock.Setup(n => n(It.IsAny<HttpContext>()))
.Callback(() => _nextCalled = true)
.Returns(Task.CompletedTask);
}
private AuthorizationMiddleware CreateMiddleware()
{
return new AuthorizationMiddleware(
_nextMock.Object,
_claimsStoreMock.Object,
NullLogger<AuthorizationMiddleware>.Instance);
}
private static HttpContext CreateHttpContext(
EndpointDescriptor? endpoint = null,
ClaimsPrincipal? user = null)
{
var context = new DefaultHttpContext();
context.Response.Body = new MemoryStream();
if (endpoint is not null)
{
context.Items[RouterHttpContextKeys.EndpointDescriptor] = endpoint;
}
if (user is not null)
{
context.User = user;
}
return context;
}
private static EndpointDescriptor CreateEndpoint(
string serviceName = "test-service",
string method = "GET",
string path = "/api/test",
ClaimRequirement[]? claims = null)
{
return new EndpointDescriptor
{
ServiceName = serviceName,
Version = "1.0.0",
Method = method,
Path = path,
RequiringClaims = claims ?? []
};
}
private static ClaimsPrincipal CreateUserWithClaims(params (string Type, string Value)[] claims)
{
var identity = new ClaimsIdentity(
claims.Select(c => new Claim(c.Type, c.Value)),
"TestAuth");
return new ClaimsPrincipal(identity);
}
#region No Endpoint Tests
[Fact]
public async Task InvokeAsync_WithNoEndpoint_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(endpoint: null);
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeTrue();
}
#endregion
#region Empty Claims Tests
[Fact]
public async Task InvokeAsync_WithEmptyRequiringClaims_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var context = CreateHttpContext(endpoint: endpoint);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>());
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeTrue();
context.Response.StatusCode.Should().Be(StatusCodes.Status200OK);
}
#endregion
#region Matching Claims Tests
[Fact]
public async Task InvokeAsync_WithMatchingClaims_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var user = CreateUserWithClaims(("role", "admin"));
var context = CreateHttpContext(endpoint: endpoint, user: user);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>
{
new() { Type = "role", Value = "admin" }
});
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeTrue();
context.Response.StatusCode.Should().Be(StatusCodes.Status200OK);
}
[Fact]
public async Task InvokeAsync_WithClaimTypeOnly_MatchesAnyValue()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var user = CreateUserWithClaims(("role", "any-value"));
var context = CreateHttpContext(endpoint: endpoint, user: user);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>
{
new() { Type = "role", Value = null } // Any value matches
});
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeTrue();
}
[Fact]
public async Task InvokeAsync_WithMultipleMatchingClaims_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var user = CreateUserWithClaims(
("role", "admin"),
("department", "engineering"),
("level", "senior"));
var context = CreateHttpContext(endpoint: endpoint, user: user);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>
{
new() { Type = "role", Value = "admin" },
new() { Type = "department", Value = "engineering" }
});
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeTrue();
}
#endregion
#region Missing Claims Tests
[Fact]
public async Task InvokeAsync_WithMissingClaim_Returns403()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var user = CreateUserWithClaims(("role", "user")); // Has role, but wrong value
var context = CreateHttpContext(endpoint: endpoint, user: user);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>
{
new() { Type = "role", Value = "admin" }
});
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status403Forbidden);
}
[Fact]
public async Task InvokeAsync_WithMissingClaimType_Returns403()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var user = CreateUserWithClaims(("department", "engineering"));
var context = CreateHttpContext(endpoint: endpoint, user: user);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>
{
new() { Type = "role", Value = "admin" }
});
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status403Forbidden);
}
[Fact]
public async Task InvokeAsync_WithNoClaims_Returns403()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var user = CreateUserWithClaims(); // No claims at all
var context = CreateHttpContext(endpoint: endpoint, user: user);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>
{
new() { Type = "role", Value = "admin" }
});
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status403Forbidden);
}
[Fact]
public async Task InvokeAsync_WithPartialMatchingClaims_Returns403()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var user = CreateUserWithClaims(("role", "admin")); // Has one, missing another
var context = CreateHttpContext(endpoint: endpoint, user: user);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>
{
new() { Type = "role", Value = "admin" },
new() { Type = "department", Value = "engineering" }
});
// Act
await middleware.InvokeAsync(context);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status403Forbidden);
}
#endregion
#region Response Body Tests
[Fact]
public async Task InvokeAsync_WithMissingClaim_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var user = CreateUserWithClaims();
var context = CreateHttpContext(endpoint: endpoint, user: user);
_claimsStoreMock.Setup(s => s.GetEffectiveClaims(
endpoint.ServiceName, endpoint.Method, endpoint.Path))
.Returns(new List<ClaimRequirement>
{
new() { Type = "role", Value = "admin" }
});
// Act
await middleware.InvokeAsync(context);
// Assert
context.Response.ContentType.Should().StartWith("application/json");
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("Forbidden");
responseBody.Should().Contain("role");
}
#endregion
}

View File

@@ -0,0 +1,404 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Gateway.WebService.Authorization;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests;
/// <summary>
/// Unit tests for <see cref="EffectiveClaimsStore"/>.
/// </summary>
public sealed class EffectiveClaimsStoreTests
{
private readonly EffectiveClaimsStore _store;
public EffectiveClaimsStoreTests()
{
_store = new EffectiveClaimsStore(NullLogger<EffectiveClaimsStore>.Instance);
}
#region GetEffectiveClaims Tests
[Fact]
public void GetEffectiveClaims_NoClaimsRegistered_ReturnsEmptyList()
{
// Arrange - fresh store
// Act
var claims = _store.GetEffectiveClaims("service", "GET", "/api/test");
// Assert
claims.Should().BeEmpty();
}
[Fact]
public void GetEffectiveClaims_MicroserviceClaimsOnly_ReturnsMicroserviceClaims()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "GET",
Path = "/api/users",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "admin" }]
}
};
_store.UpdateFromMicroservice("test-service", endpoints);
// Act
var claims = _store.GetEffectiveClaims("test-service", "GET", "/api/users");
// Assert
claims.Should().HaveCount(1);
claims[0].Type.Should().Be("role");
claims[0].Value.Should().Be("admin");
}
[Fact]
public void GetEffectiveClaims_AuthorityOverridesTakePrecedence()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "GET",
Path = "/api/users",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "user" }]
}
};
_store.UpdateFromMicroservice("test-service", endpoints);
var key = EndpointKey.Create("test-service", "GET", "/api/users");
var overrides = new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>
{
[key] = [new ClaimRequirement { Type = "role", Value = "admin" }]
};
_store.UpdateFromAuthority(overrides);
// Act
var claims = _store.GetEffectiveClaims("test-service", "GET", "/api/users");
// Assert
claims.Should().HaveCount(1);
claims[0].Value.Should().Be("admin");
}
[Fact]
public void GetEffectiveClaims_MethodNormalization_MatchesCaseInsensitively()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "get",
Path = "/api/users",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "admin" }]
}
};
_store.UpdateFromMicroservice("test-service", endpoints);
// Act
var claims = _store.GetEffectiveClaims("test-service", "GET", "/api/users");
// Assert
claims.Should().HaveCount(1);
}
[Fact]
public void GetEffectiveClaims_PathNormalization_MatchesCaseInsensitively()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "GET",
Path = "/API/USERS",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "admin" }]
}
};
_store.UpdateFromMicroservice("test-service", endpoints);
// Act
var claims = _store.GetEffectiveClaims("test-service", "GET", "/api/users");
// Assert
claims.Should().HaveCount(1);
}
#endregion
#region UpdateFromMicroservice Tests
[Fact]
public void UpdateFromMicroservice_MultipleEndpoints_RegistersAll()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "GET",
Path = "/api/users",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "reader" }]
},
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "POST",
Path = "/api/users",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "writer" }]
}
};
// Act
_store.UpdateFromMicroservice("test-service", endpoints);
// Assert
_store.GetEffectiveClaims("test-service", "GET", "/api/users")[0].Value.Should().Be("reader");
_store.GetEffectiveClaims("test-service", "POST", "/api/users")[0].Value.Should().Be("writer");
}
[Fact]
public void UpdateFromMicroservice_EmptyClaims_RemovesFromStore()
{
// Arrange - first add some claims
var endpoints1 = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "GET",
Path = "/api/users",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "admin" }]
}
};
_store.UpdateFromMicroservice("test-service", endpoints1);
// Now update with empty claims
var endpoints2 = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "GET",
Path = "/api/users",
RequiringClaims = []
}
};
// Act
_store.UpdateFromMicroservice("test-service", endpoints2);
// Assert
_store.GetEffectiveClaims("test-service", "GET", "/api/users").Should().BeEmpty();
}
[Fact]
public void UpdateFromMicroservice_DefaultEmptyClaims_TreatedAsEmpty()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "GET",
Path = "/api/users"
// RequiringClaims defaults to []
}
};
// Act
_store.UpdateFromMicroservice("test-service", endpoints);
// Assert
_store.GetEffectiveClaims("test-service", "GET", "/api/users").Should().BeEmpty();
}
#endregion
#region UpdateFromAuthority Tests
[Fact]
public void UpdateFromAuthority_ClearsPreviousOverrides()
{
// Arrange - add initial override
var key1 = EndpointKey.Create("service1", "GET", "/api/test1");
var overrides1 = new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>
{
[key1] = [new ClaimRequirement { Type = "role", Value = "old" }]
};
_store.UpdateFromAuthority(overrides1);
// Update with new overrides (different key)
var key2 = EndpointKey.Create("service2", "POST", "/api/test2");
var overrides2 = new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>
{
[key2] = [new ClaimRequirement { Type = "role", Value = "new" }]
};
// Act
_store.UpdateFromAuthority(overrides2);
// Assert
_store.GetEffectiveClaims("service1", "GET", "/api/test1").Should().BeEmpty();
_store.GetEffectiveClaims("service2", "POST", "/api/test2").Should().HaveCount(1);
}
[Fact]
public void UpdateFromAuthority_EmptyClaimsNotStored()
{
// Arrange
var key = EndpointKey.Create("service", "GET", "/api/test");
var overrides = new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>
{
[key] = []
};
// Act
_store.UpdateFromAuthority(overrides);
// Assert - should fall back to microservice (which is empty)
_store.GetEffectiveClaims("service", "GET", "/api/test").Should().BeEmpty();
}
[Fact]
public void UpdateFromAuthority_MultipleOverrides()
{
// Arrange
var key1 = EndpointKey.Create("service1", "GET", "/api/users");
var key2 = EndpointKey.Create("service1", "POST", "/api/users");
var overrides = new Dictionary<EndpointKey, IReadOnlyList<ClaimRequirement>>
{
[key1] = [new ClaimRequirement { Type = "role", Value = "reader" }],
[key2] = [new ClaimRequirement { Type = "role", Value = "writer" }]
};
// Act
_store.UpdateFromAuthority(overrides);
// Assert
_store.GetEffectiveClaims("service1", "GET", "/api/users")[0].Value.Should().Be("reader");
_store.GetEffectiveClaims("service1", "POST", "/api/users")[0].Value.Should().Be("writer");
}
#endregion
#region RemoveService Tests
[Fact]
public void RemoveService_RemovesMicroserviceClaims()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
ServiceName = "test-service",
Version = "1.0.0",
Method = "GET",
Path = "/api/users",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "admin" }]
}
};
_store.UpdateFromMicroservice("test-service", endpoints);
// Act
_store.RemoveService("test-service");
// Assert
_store.GetEffectiveClaims("test-service", "GET", "/api/users").Should().BeEmpty();
}
[Fact]
public void RemoveService_CaseInsensitive()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
ServiceName = "Test-Service",
Version = "1.0.0",
Method = "GET",
Path = "/api/users",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "admin" }]
}
};
_store.UpdateFromMicroservice("Test-Service", endpoints);
// Act - remove with different case
_store.RemoveService("TEST-SERVICE");
// Assert
_store.GetEffectiveClaims("test-service", "GET", "/api/users").Should().BeEmpty();
}
[Fact]
public void RemoveService_OnlyRemovesTargetService()
{
// Arrange
var endpoints1 = new[]
{
new EndpointDescriptor
{
ServiceName = "service-a",
Version = "1.0.0",
Method = "GET",
Path = "/api/a",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "a" }]
}
};
var endpoints2 = new[]
{
new EndpointDescriptor
{
ServiceName = "service-b",
Version = "1.0.0",
Method = "GET",
Path = "/api/b",
RequiringClaims = [new ClaimRequirement { Type = "role", Value = "b" }]
}
};
_store.UpdateFromMicroservice("service-a", endpoints1);
_store.UpdateFromMicroservice("service-b", endpoints2);
// Act
_store.RemoveService("service-a");
// Assert
_store.GetEffectiveClaims("service-a", "GET", "/api/a").Should().BeEmpty();
_store.GetEffectiveClaims("service-b", "GET", "/api/b").Should().HaveCount(1);
}
[Fact]
public void RemoveService_UnknownService_DoesNotThrow()
{
// Arrange & Act
var action = () => _store.RemoveService("unknown-service");
// Assert
action.Should().NotThrow();
}
#endregion
}

View File

@@ -0,0 +1,287 @@
using FluentAssertions;
using Microsoft.AspNetCore.Http;
using Moq;
using StellaOps.Gateway.WebService.Middleware;
using StellaOps.Router.Common.Abstractions;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests;
/// <summary>
/// Unit tests for <see cref="EndpointResolutionMiddleware"/>.
/// </summary>
public sealed class EndpointResolutionMiddlewareTests
{
private readonly Mock<IGlobalRoutingState> _routingStateMock;
private readonly Mock<RequestDelegate> _nextMock;
private bool _nextCalled;
public EndpointResolutionMiddlewareTests()
{
_routingStateMock = new Mock<IGlobalRoutingState>();
_nextMock = new Mock<RequestDelegate>();
_nextMock.Setup(n => n(It.IsAny<HttpContext>()))
.Callback(() => _nextCalled = true)
.Returns(Task.CompletedTask);
}
private EndpointResolutionMiddleware CreateMiddleware()
{
return new EndpointResolutionMiddleware(_nextMock.Object);
}
private static HttpContext CreateHttpContext(string method = "GET", string path = "/api/test")
{
var context = new DefaultHttpContext();
context.Request.Method = method;
context.Request.Path = path;
context.Response.Body = new MemoryStream();
return context;
}
private static EndpointDescriptor CreateEndpoint(
string serviceName = "test-service",
string method = "GET",
string path = "/api/test")
{
return new EndpointDescriptor
{
ServiceName = serviceName,
Version = "1.0.0",
Method = method,
Path = path
};
}
#region Matching Endpoint Tests
[Fact]
public async Task Invoke_WithMatchingEndpoint_SetsHttpContextItem()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var context = CreateHttpContext();
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", "/api/test"))
.Returns(endpoint);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_nextCalled.Should().BeTrue();
context.Items[RouterHttpContextKeys.EndpointDescriptor].Should().Be(endpoint);
}
[Fact]
public async Task Invoke_WithMatchingEndpoint_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var context = CreateHttpContext();
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", "/api/test"))
.Returns(endpoint);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_nextCalled.Should().BeTrue();
}
#endregion
#region Unknown Path Tests
[Fact]
public async Task Invoke_WithUnknownPath_Returns404()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(path: "/api/unknown");
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", "/api/unknown"))
.Returns((EndpointDescriptor?)null);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status404NotFound);
}
[Fact]
public async Task Invoke_WithUnknownPath_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(path: "/api/unknown");
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", "/api/unknown"))
.Returns((EndpointDescriptor?)null);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("not found");
responseBody.Should().Contain("/api/unknown");
}
#endregion
#region HTTP Method Tests
[Fact]
public async Task Invoke_WithPostMethod_ResolvesCorrectly()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint(method: "POST");
var context = CreateHttpContext(method: "POST");
_routingStateMock.Setup(r => r.ResolveEndpoint("POST", "/api/test"))
.Returns(endpoint);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_nextCalled.Should().BeTrue();
context.Items[RouterHttpContextKeys.EndpointDescriptor].Should().Be(endpoint);
}
[Fact]
public async Task Invoke_WithDeleteMethod_ResolvesCorrectly()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint(method: "DELETE", path: "/api/users/123");
var context = CreateHttpContext(method: "DELETE", path: "/api/users/123");
_routingStateMock.Setup(r => r.ResolveEndpoint("DELETE", "/api/users/123"))
.Returns(endpoint);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_nextCalled.Should().BeTrue();
}
[Fact]
public async Task Invoke_WithWrongMethod_Returns404()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(method: "DELETE", path: "/api/test");
_routingStateMock.Setup(r => r.ResolveEndpoint("DELETE", "/api/test"))
.Returns((EndpointDescriptor?)null);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status404NotFound);
}
#endregion
#region Path Variations Tests
[Fact]
public async Task Invoke_WithParameterizedPath_ResolvesCorrectly()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint(path: "/api/users/{id}");
var context = CreateHttpContext(path: "/api/users/123");
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", "/api/users/123"))
.Returns(endpoint);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_nextCalled.Should().BeTrue();
context.Items[RouterHttpContextKeys.EndpointDescriptor].Should().Be(endpoint);
}
[Fact]
public async Task Invoke_WithRootPath_ResolvesCorrectly()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint(path: "/");
var context = CreateHttpContext(path: "/");
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", "/"))
.Returns(endpoint);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_nextCalled.Should().BeTrue();
}
[Fact]
public async Task Invoke_WithEmptyPath_PassesEmptyStringToRouting()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(path: "");
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", ""))
.Returns((EndpointDescriptor?)null);
// Act
await middleware.Invoke(context, _routingStateMock.Object);
// Assert
_routingStateMock.Verify(r => r.ResolveEndpoint("GET", ""), Times.Once);
}
#endregion
#region Multiple Calls Tests
[Fact]
public async Task Invoke_MultipleCalls_EachResolvesIndependently()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint1 = CreateEndpoint(path: "/api/users");
var endpoint2 = CreateEndpoint(path: "/api/items");
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", "/api/users"))
.Returns(endpoint1);
_routingStateMock.Setup(r => r.ResolveEndpoint("GET", "/api/items"))
.Returns(endpoint2);
var context1 = CreateHttpContext(path: "/api/users");
var context2 = CreateHttpContext(path: "/api/items");
// Act
await middleware.Invoke(context1, _routingStateMock.Object);
await middleware.Invoke(context2, _routingStateMock.Object);
// Assert
context1.Items[RouterHttpContextKeys.EndpointDescriptor].Should().Be(endpoint1);
context2.Items[RouterHttpContextKeys.EndpointDescriptor].Should().Be(endpoint2);
}
#endregion
}

View File

@@ -0,0 +1,356 @@
using System.Net;
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Moq;
using Moq.Protected;
using StellaOps.Gateway.WebService.Authorization;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests;
/// <summary>
/// Unit tests for <see cref="HttpAuthorityClaimsProvider"/>.
/// </summary>
public sealed class HttpAuthorityClaimsProviderTests
{
private readonly Mock<HttpMessageHandler> _httpHandlerMock;
private readonly HttpClient _httpClient;
private readonly AuthorityConnectionOptions _options;
public HttpAuthorityClaimsProviderTests()
{
_httpHandlerMock = new Mock<HttpMessageHandler>();
_httpClient = new HttpClient(_httpHandlerMock.Object);
_options = new AuthorityConnectionOptions
{
AuthorityUrl = "http://authority.local"
};
}
private HttpAuthorityClaimsProvider CreateProvider()
{
return new HttpAuthorityClaimsProvider(
_httpClient,
Options.Create(_options),
NullLogger<HttpAuthorityClaimsProvider>.Instance);
}
#region GetOverridesAsync Tests
[Fact]
public async Task GetOverridesAsync_NoAuthorityUrl_ReturnsEmpty()
{
// Arrange
_options.AuthorityUrl = string.Empty;
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().BeEmpty();
provider.IsAvailable.Should().BeFalse();
}
[Fact]
public async Task GetOverridesAsync_WhitespaceUrl_ReturnsEmpty()
{
// Arrange
_options.AuthorityUrl = " ";
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().BeEmpty();
provider.IsAvailable.Should().BeFalse();
}
[Fact]
public async Task GetOverridesAsync_SuccessfulResponse_ParsesOverrides()
{
// Arrange
var responseBody = JsonSerializer.Serialize(new
{
overrides = new[]
{
new
{
serviceName = "test-service",
method = "GET",
path = "/api/users",
requiringClaims = new[]
{
new { type = "role", value = "admin" }
}
}
}
}, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
SetupHttpResponse(HttpStatusCode.OK, responseBody);
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().HaveCount(1);
provider.IsAvailable.Should().BeTrue();
var key = result.Keys.First();
key.ServiceName.Should().Be("test-service");
key.Method.Should().Be("GET");
key.Path.Should().Be("/api/users");
result[key].Should().HaveCount(1);
result[key][0].Type.Should().Be("role");
result[key][0].Value.Should().Be("admin");
}
[Fact]
public async Task GetOverridesAsync_EmptyOverrides_ReturnsEmpty()
{
// Arrange
var responseBody = JsonSerializer.Serialize(new
{
overrides = Array.Empty<object>()
});
SetupHttpResponse(HttpStatusCode.OK, responseBody);
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().BeEmpty();
provider.IsAvailable.Should().BeTrue();
}
[Fact]
public async Task GetOverridesAsync_NullOverrides_ReturnsEmpty()
{
// Arrange
var responseBody = "{}";
SetupHttpResponse(HttpStatusCode.OK, responseBody);
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().BeEmpty();
provider.IsAvailable.Should().BeTrue();
}
[Fact]
public async Task GetOverridesAsync_HttpError_ReturnsEmptyAndSetsUnavailable()
{
// Arrange
SetupHttpResponse(HttpStatusCode.InternalServerError, "Error");
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().BeEmpty();
provider.IsAvailable.Should().BeFalse();
}
[Fact]
public async Task GetOverridesAsync_Timeout_ReturnsEmptyAndSetsUnavailable()
{
// Arrange
_httpHandlerMock.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.ThrowsAsync(new TaskCanceledException("Timeout"));
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().BeEmpty();
provider.IsAvailable.Should().BeFalse();
}
[Fact]
public async Task GetOverridesAsync_NetworkError_ReturnsEmptyAndSetsUnavailable()
{
// Arrange
_httpHandlerMock.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.ThrowsAsync(new HttpRequestException("Connection refused"));
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().BeEmpty();
provider.IsAvailable.Should().BeFalse();
}
[Fact]
public async Task GetOverridesAsync_TrimsTrailingSlash()
{
// Arrange
_options.AuthorityUrl = "http://authority.local/";
var responseBody = JsonSerializer.Serialize(new { overrides = Array.Empty<object>() });
string? capturedUrl = null;
_httpHandlerMock.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.ReturnsAsync((HttpRequestMessage req, CancellationToken _) =>
{
capturedUrl = req.RequestUri?.ToString();
return new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new StringContent(responseBody)
};
});
var provider = CreateProvider();
// Act
await provider.GetOverridesAsync(CancellationToken.None);
// Assert
capturedUrl.Should().Be("http://authority.local/api/v1/claims/overrides");
}
[Fact]
public async Task GetOverridesAsync_MultipleOverrides_ParsesAll()
{
// Arrange
var responseBody = JsonSerializer.Serialize(new
{
overrides = new[]
{
new
{
serviceName = "service-a",
method = "GET",
path = "/api/a",
requiringClaims = new[] { new { type = "role", value = "a" } }
},
new
{
serviceName = "service-b",
method = "POST",
path = "/api/b",
requiringClaims = new[]
{
new { type = "role", value = "b1" },
new { type = "department", value = "b2" }
}
}
}
}, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
SetupHttpResponse(HttpStatusCode.OK, responseBody);
var provider = CreateProvider();
// Act
var result = await provider.GetOverridesAsync(CancellationToken.None);
// Assert
result.Should().HaveCount(2);
}
#endregion
#region IsAvailable Tests
[Fact]
public void IsAvailable_InitiallyFalse()
{
// Arrange
var provider = CreateProvider();
// Assert
provider.IsAvailable.Should().BeFalse();
}
[Fact]
public async Task IsAvailable_TrueAfterSuccessfulFetch()
{
// Arrange
SetupHttpResponse(HttpStatusCode.OK, "{}");
var provider = CreateProvider();
// Act
await provider.GetOverridesAsync(CancellationToken.None);
// Assert
provider.IsAvailable.Should().BeTrue();
}
[Fact]
public async Task IsAvailable_FalseAfterFailedFetch()
{
// Arrange
SetupHttpResponse(HttpStatusCode.ServiceUnavailable, "");
var provider = CreateProvider();
// Act
await provider.GetOverridesAsync(CancellationToken.None);
// Assert
provider.IsAvailable.Should().BeFalse();
}
#endregion
#region OverridesChanged Event Tests
[Fact]
public void OverridesChanged_CanBeSubscribed()
{
// Arrange
var provider = CreateProvider();
var eventRaised = false;
// Act
provider.OverridesChanged += (_, _) => eventRaised = true;
// Assert - no exception during subscription, event not raised yet
eventRaised.Should().BeFalse();
provider.Should().NotBeNull();
}
#endregion
#region Helper Methods
private void SetupHttpResponse(HttpStatusCode statusCode, string content)
{
_httpHandlerMock.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.ReturnsAsync(new HttpResponseMessage(statusCode)
{
Content = new StringContent(content)
});
}
#endregion
}

View File

@@ -0,0 +1,182 @@
using FluentAssertions;
using StellaOps.Gateway.WebService.OpenApi;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests.OpenApi;
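/// <summary>
/// Unit tests for <see cref="ClaimSecurityMapper"/>.
/// </summary>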
public class ClaimSecurityMapperTests
{
[Fact]
public void GenerateSecuritySchemes_WithNoEndpoints_ReturnsBearerAuthOnly()
{
// Arrange
var endpoints = Array.Empty<EndpointDescriptor>();
// Act
var schemes = ClaimSecurityMapper.GenerateSecuritySchemes(endpoints, "/auth/token");
// Assert
schemes.Should().ContainKey("BearerAuth");
schemes.Should().NotContainKey("OAuth2");
}
[Fact]
public void GenerateSecuritySchemes_WithClaimRequirements_ReturnsOAuth2()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
Method = "POST",
Path = "/test",
ServiceName = "test",
Version = "1.0.0",
RequiringClaims = [new ClaimRequirement { Type = "test:write" }]
}
};
// Act
var schemes = ClaimSecurityMapper.GenerateSecuritySchemes(endpoints, "/auth/token");
// Assert
schemes.Should().ContainKey("BearerAuth");
schemes.Should().ContainKey("OAuth2");
}
[Fact]
public void GenerateSecuritySchemes_CollectsAllUniqueScopes()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
Method = "POST",
Path = "/invoices",
ServiceName = "billing",
Version = "1.0.0",
RequiringClaims = [new ClaimRequirement { Type = "billing:write" }]
},
new EndpointDescriptor
{
Method = "GET",
Path = "/invoices",
ServiceName = "billing",
Version = "1.0.0",
RequiringClaims = [new ClaimRequirement { Type = "billing:read" }]
},
new EndpointDescriptor
{
Method = "POST",
Path = "/payments",
ServiceName = "billing",
Version = "1.0.0",
RequiringClaims = [new ClaimRequirement { Type = "billing:write" }] // Duplicate scope; should collapse to a single entry
}
};
// Act
var schemes = ClaimSecurityMapper.GenerateSecuritySchemes(endpoints, "/auth/token");
// Assert
var oauth2 = schemes["OAuth2"];
var scopes = oauth2!["flows"]!["clientCredentials"]!["scopes"]!;
scopes.AsObject().Count.Should().Be(2); // Only unique scopes
scopes["billing:write"].Should().NotBeNull();
scopes["billing:read"].Should().NotBeNull();
}
[Fact]
public void GenerateSecuritySchemes_SetsCorrectTokenUrl()
{
// Arrange
var endpoints = new[]
{
new EndpointDescriptor
{
Method = "POST",
Path = "/test",
ServiceName = "test",
Version = "1.0.0",
RequiringClaims = [new ClaimRequirement { Type = "test:write" }]
}
};
// Act
var schemes = ClaimSecurityMapper.GenerateSecuritySchemes(endpoints, "/custom/token");
// Assert
var tokenUrl = schemes["OAuth2"]!["flows"]!["clientCredentials"]!["tokenUrl"]!.GetValue<string>();
tokenUrl.Should().Be("/custom/token");
}
[Fact]
public void GenerateSecurityRequirement_WithNoClaimRequirements_ReturnsEmptyArray()
{
// Arrange
var endpoint = new EndpointDescriptor
{
Method = "GET",
Path = "/public",
ServiceName = "test",
Version = "1.0.0",
RequiringClaims = []
};
// Act
var requirement = ClaimSecurityMapper.GenerateSecurityRequirement(endpoint);
// Assert
requirement.Count.Should().Be(0);
}
[Fact]
public void GenerateSecurityRequirement_WithClaimRequirements_ReturnsBearerAndOAuth2()
{
// Arrange
var endpoint = new EndpointDescriptor
{
Method = "POST",
Path = "/secure",
ServiceName = "test",
Version = "1.0.0",
RequiringClaims =
[
new ClaimRequirement { Type = "billing:write" },
new ClaimRequirement { Type = "billing:admin" }
]
};
// Act
var requirement = ClaimSecurityMapper.GenerateSecurityRequirement(endpoint);
// Assert
requirement.Count.Should().Be(1);
var req = requirement[0]!.AsObject();
req.Should().ContainKey("BearerAuth");
req.Should().ContainKey("OAuth2");
var scopes = req["OAuth2"]!.AsArray();
scopes.Count.Should().Be(2);
}
[Fact]
public void GenerateSecuritySchemes_BearerAuth_HasCorrectStructure()
{
// Arrange
var endpoints = Array.Empty<EndpointDescriptor>();
// Act
var schemes = ClaimSecurityMapper.GenerateSecuritySchemes(endpoints, "/auth/token");
// Assert
var bearer = schemes["BearerAuth"]!.AsObject();
bearer["type"]!.GetValue<string>().Should().Be("http");
bearer["scheme"]!.GetValue<string>().Should().Be("bearer");
bearer["bearerFormat"]!.GetValue<string>().Should().Be("JWT");
}
}

View File

@@ -0,0 +1,166 @@
using FluentAssertions;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Gateway.WebService.OpenApi;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests.OpenApi;
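/// <summary>
/// Unit tests for <see cref="GatewayOpenApiDocumentCache"/>.
/// </summary>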
public class GatewayOpenApiDocumentCacheTests
{
private readonly Mock<IOpenApiDocumentGenerator> _generator = new();
private readonly OpenApiAggregationOptions _options = new() { CacheTtlSeconds = 60 };
private readonly GatewayOpenApiDocumentCache _sut;
public GatewayOpenApiDocumentCacheTests()
{
_sut = new GatewayOpenApiDocumentCache(
_generator.Object,
Options.Create(_options));
}
[Fact]
public void GetDocument_FirstCall_GeneratesDocument()
{
// Arrange
var expectedDoc = """{"openapi":"3.1.0"}""";
_generator.Setup(x => x.GenerateDocument()).Returns(expectedDoc);
// Act
var (doc, _, _) = _sut.GetDocument();
// Assert
doc.Should().Be(expectedDoc);
_generator.Verify(x => x.GenerateDocument(), Times.Once);
}
[Fact]
public void GetDocument_SubsequentCalls_ReturnsCachedDocument()
{
// Arrange
var expectedDoc = """{"openapi":"3.1.0"}""";
_generator.Setup(x => x.GenerateDocument()).Returns(expectedDoc);
// Act
var (doc1, _, _) = _sut.GetDocument();
var (doc2, _, _) = _sut.GetDocument();
var (doc3, _, _) = _sut.GetDocument();
// Assert
doc1.Should().Be(expectedDoc);
doc2.Should().Be(expectedDoc);
doc3.Should().Be(expectedDoc);
_generator.Verify(x => x.GenerateDocument(), Times.Once);
}
[Fact]
public void GetDocument_AfterInvalidate_RegeneratesDocument()
{
// Arrange
var doc1 = """{"openapi":"3.1.0","version":"1"}""";
var doc2 = """{"openapi":"3.1.0","version":"2"}""";
_generator.SetupSequence(x => x.GenerateDocument())
.Returns(doc1)
.Returns(doc2);
// Act
var (result1, _, _) = _sut.GetDocument();
_sut.Invalidate();
var (result2, _, _) = _sut.GetDocument();
// Assert
result1.Should().Be(doc1);
result2.Should().Be(doc2);
_generator.Verify(x => x.GenerateDocument(), Times.Exactly(2));
}
[Fact]
public void GetDocument_ReturnsConsistentETag()
{
// Arrange
var expectedDoc = """{"openapi":"3.1.0"}""";
_generator.Setup(x => x.GenerateDocument()).Returns(expectedDoc);
// Act
var (_, etag1, _) = _sut.GetDocument();
var (_, etag2, _) = _sut.GetDocument();
// Assert
etag1.Should().NotBeNullOrEmpty();
etag1.Should().Be(etag2);
etag1.Should().StartWith("\"").And.EndWith("\""); // ETag format
}
[Fact]
public void GetDocument_DifferentContent_DifferentETag()
{
// Arrange
var doc1 = """{"openapi":"3.1.0","version":"1"}""";
var doc2 = """{"openapi":"3.1.0","version":"2"}""";
_generator.SetupSequence(x => x.GenerateDocument())
.Returns(doc1)
.Returns(doc2);
// Act
var (_, etag1, _) = _sut.GetDocument();
_sut.Invalidate();
var (_, etag2, _) = _sut.GetDocument();
// Assert
etag1.Should().NotBe(etag2);
}
[Fact]
public void GetDocument_ReturnsGenerationTimestamp()
{
// Arrange
_generator.Setup(x => x.GenerateDocument()).Returns("{}");
var beforeGeneration = DateTime.UtcNow;
// Act
var (_, _, generatedAt) = _sut.GetDocument();
// Assert
generatedAt.Should().BeOnOrAfter(beforeGeneration);
generatedAt.Should().BeOnOrBefore(DateTime.UtcNow);
}
[Fact]
public void Invalidate_CanBeCalledMultipleTimes()
{
// Arrange
_generator.Setup(x => x.GenerateDocument()).Returns("{}");
_sut.GetDocument();
// Act & Assert - should not throw
_sut.Invalidate();
_sut.Invalidate();
_sut.Invalidate();
}
[Fact]
public void GetDocument_WithZeroTtl_AlwaysRegenerates()
{
// Arrange
var options = new OpenApiAggregationOptions { CacheTtlSeconds = 0 };
var sut = new GatewayOpenApiDocumentCache(
_generator.Object,
Options.Create(options));
var callCount = 0;
_generator.Setup(x => x.GenerateDocument())
.Returns(() => $"{{\"call\":{++callCount}}}");
// Act
sut.GetDocument();
// Wait a tiny bit to ensure TTL is exceeded
Thread.Sleep(10);
sut.GetDocument();
// Assert
// With 0 TTL, each call should regenerate
_generator.Verify(x => x.GenerateDocument(), Times.Exactly(2));
}
}

View File

@@ -0,0 +1,338 @@
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Gateway.WebService.OpenApi;
using StellaOps.Router.Common.Abstractions;
using StellaOps.Router.Common.Enums;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests.OpenApi;
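/// <summary>
/// Unit tests for <see cref="OpenApiDocumentGenerator"/>.
/// </summary>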
public class OpenApiDocumentGeneratorTests
{
private readonly Mock<IGlobalRoutingState> _routingState = new();
private readonly OpenApiAggregationOptions _options = new();
private readonly OpenApiDocumentGenerator _sut;
public OpenApiDocumentGeneratorTests()
{
_sut = new OpenApiDocumentGenerator(
_routingState.Object,
Options.Create(_options));
}
private static ConnectionState CreateConnection(
string serviceName = "test-service",
string version = "1.0.0",
params EndpointDescriptor[] endpoints)
{
var connection = new ConnectionState
{
ConnectionId = $"conn-{serviceName}",
Instance = new InstanceDescriptor
{
InstanceId = $"inst-{serviceName}",
ServiceName = serviceName,
Version = version,
Region = "us-east-1"
},
Status = InstanceHealthStatus.Healthy,
TransportType = TransportType.InMemory,
Schemas = new Dictionary<string, SchemaDefinition>(),
OpenApiInfo = new ServiceOpenApiInfo
{
Title = serviceName,
Description = $"Test {serviceName} service"
}
};
foreach (var endpoint in endpoints)
{
connection.Endpoints[(endpoint.Method, endpoint.Path)] = endpoint;
}
return connection;
}
[Fact]
public void GenerateDocument_WithNoConnections_ReturnsValidOpenApiDocument()
{
// Arrange
_routingState.Setup(x => x.GetAllConnections()).Returns([]);
// Act
var document = _sut.GenerateDocument();
// Assert
document.Should().NotBeNullOrEmpty();
var doc = JsonDocument.Parse(document);
doc.RootElement.GetProperty("openapi").GetString().Should().Be("3.1.0");
doc.RootElement.GetProperty("info").GetProperty("title").GetString().Should().Be(_options.Title);
}
[Fact]
public void GenerateDocument_SetsCorrectInfoSection()
{
// Arrange
_options.Title = "My Gateway API";
_options.Description = "My description";
_options.Version = "2.0.0";
_options.LicenseName = "MIT";
_routingState.Setup(x => x.GetAllConnections()).Returns([]);
// Act
var document = _sut.GenerateDocument();
// Assert
var doc = JsonDocument.Parse(document);
var info = doc.RootElement.GetProperty("info");
info.GetProperty("title").GetString().Should().Be("My Gateway API");
info.GetProperty("description").GetString().Should().Be("My description");
info.GetProperty("version").GetString().Should().Be("2.0.0");
info.GetProperty("license").GetProperty("name").GetString().Should().Be("MIT");
}
[Fact]
public void GenerateDocument_WithConnections_GeneratesPaths()
{
// Arrange
var endpoint = new EndpointDescriptor
{
Method = "GET",
Path = "/api/items",
ServiceName = "inventory",
Version = "1.0.0"
};
var connection = CreateConnection("inventory", "1.0.0", endpoint);
_routingState.Setup(x => x.GetAllConnections()).Returns([connection]);
// Act
var document = _sut.GenerateDocument();
// Assert
var doc = JsonDocument.Parse(document);
var paths = doc.RootElement.GetProperty("paths");
paths.TryGetProperty("/api/items", out var pathItem).Should().BeTrue();
pathItem.TryGetProperty("get", out var operation).Should().BeTrue();
}
[Fact]
public void GenerateDocument_WithSchemaInfo_IncludesDocumentation()
{
// Arrange
var endpoint = new EndpointDescriptor
{
Method = "POST",
Path = "/invoices",
ServiceName = "billing",
Version = "1.0.0",
SchemaInfo = new EndpointSchemaInfo
{
Summary = "Create invoice",
Description = "Creates a new invoice",
Tags = ["billing", "invoices"],
Deprecated = false
}
};
var connection = CreateConnection("billing", "1.0.0", endpoint);
_routingState.Setup(x => x.GetAllConnections()).Returns([connection]);
// Act
var document = _sut.GenerateDocument();
// Assert
var doc = JsonDocument.Parse(document);
var operation = doc.RootElement
.GetProperty("paths")
.GetProperty("/invoices")
.GetProperty("post");
operation.GetProperty("summary").GetString().Should().Be("Create invoice");
operation.GetProperty("description").GetString().Should().Be("Creates a new invoice");
}
[Fact]
public void GenerateDocument_WithSchemas_IncludesSchemaReferences()
{
// Arrange
var endpoint = new EndpointDescriptor
{
Method = "POST",
Path = "/invoices",
ServiceName = "billing",
Version = "1.0.0",
SchemaInfo = new EndpointSchemaInfo
{
RequestSchemaId = "CreateInvoiceRequest"
}
};
var connection = CreateConnection("billing", "1.0.0", endpoint);
var connectionWithSchemas = new ConnectionState
{
ConnectionId = connection.ConnectionId,
Instance = connection.Instance,
Status = connection.Status,
TransportType = connection.TransportType,
Schemas = new Dictionary<string, SchemaDefinition>
{
["CreateInvoiceRequest"] = new SchemaDefinition
{
SchemaId = "CreateInvoiceRequest",
SchemaJson = """{"type": "object", "properties": {"amount": {"type": "number"}}}""",
ETag = "\"ABC123\""
}
}
};
connectionWithSchemas.Endpoints[(endpoint.Method, endpoint.Path)] = endpoint;
_routingState.Setup(x => x.GetAllConnections()).Returns([connectionWithSchemas]);
// Act
var document = _sut.GenerateDocument();
// Assert
var doc = JsonDocument.Parse(document);
// Check request body reference
var requestBody = doc.RootElement
.GetProperty("paths")
.GetProperty("/invoices")
.GetProperty("post")
.GetProperty("requestBody")
.GetProperty("content")
.GetProperty("application/json")
.GetProperty("schema")
.GetProperty("$ref")
.GetString();
requestBody.Should().Be("#/components/schemas/billing_CreateInvoiceRequest");
// Check schema exists in components
var schemas = doc.RootElement.GetProperty("components").GetProperty("schemas");
schemas.TryGetProperty("billing_CreateInvoiceRequest", out _).Should().BeTrue();
}
[Fact]
public void GenerateDocument_WithClaimRequirements_IncludesSecurity()
{
// Arrange
var endpoint = new EndpointDescriptor
{
Method = "POST",
Path = "/invoices",
ServiceName = "billing",
Version = "1.0.0",
RequiringClaims = [new ClaimRequirement { Type = "billing:write" }]
};
var connection = CreateConnection("billing", "1.0.0", endpoint);
_routingState.Setup(x => x.GetAllConnections()).Returns([connection]);
// Act
var document = _sut.GenerateDocument();
// Assert
var doc = JsonDocument.Parse(document);
// Check security schemes
var securitySchemes = doc.RootElement
.GetProperty("components")
.GetProperty("securitySchemes");
securitySchemes.TryGetProperty("BearerAuth", out _).Should().BeTrue();
securitySchemes.TryGetProperty("OAuth2", out _).Should().BeTrue();
// Check operation security
var operation = doc.RootElement
.GetProperty("paths")
.GetProperty("/invoices")
.GetProperty("post");
operation.TryGetProperty("security", out _).Should().BeTrue();
}
[Fact]
public void GenerateDocument_WithMultipleServices_GeneratesTags()
{
// Arrange
var billingEndpoint = new EndpointDescriptor
{
Method = "POST",
Path = "/invoices",
ServiceName = "billing",
Version = "1.0.0"
};
var inventoryEndpoint = new EndpointDescriptor
{
Method = "GET",
Path = "/items",
ServiceName = "inventory",
Version = "2.0.0"
};
var billingConn = CreateConnection("billing", "1.0.0", billingEndpoint);
var inventoryConn = CreateConnection("inventory", "2.0.0", inventoryEndpoint);
_routingState.Setup(x => x.GetAllConnections()).Returns([billingConn, inventoryConn]);
// Act
var document = _sut.GenerateDocument();
// Assert
var doc = JsonDocument.Parse(document);
var tags = doc.RootElement.GetProperty("tags");
tags.GetArrayLength().Should().Be(2);
var tagNames = new List<string>();
foreach (var tag in tags.EnumerateArray())
{
tagNames.Add(tag.GetProperty("name").GetString()!);
}
tagNames.Should().Contain("billing");
tagNames.Should().Contain("inventory");
}
[Fact]
public void GenerateDocument_WithDeprecatedEndpoint_SetsDeprecatedFlag()
{
// Arrange
var endpoint = new EndpointDescriptor
{
Method = "GET",
Path = "/legacy",
ServiceName = "test",
Version = "1.0.0",
SchemaInfo = new EndpointSchemaInfo
{
Deprecated = true
}
};
var connection = CreateConnection("test", "1.0.0", endpoint);
_routingState.Setup(x => x.GetAllConnections()).Returns([connection]);
// Act
var document = _sut.GenerateDocument();
// Assert
var doc = JsonDocument.Parse(document);
var operation = doc.RootElement
.GetProperty("paths")
.GetProperty("/legacy")
.GetProperty("get");
operation.GetProperty("deprecated").GetBoolean().Should().BeTrue();
}
}

View File

@@ -0,0 +1,337 @@
using FluentAssertions;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Gateway.WebService.Middleware;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests;
/// <summary>
/// Unit tests for <see cref="PayloadLimitsMiddleware"/>.
/// </summary>
public sealed class PayloadLimitsMiddlewareTests
{
private readonly Mock<IPayloadTracker> _trackerMock;
private readonly Mock<RequestDelegate> _nextMock;
private readonly PayloadLimits _defaultLimits;
private bool _nextCalled;
public PayloadLimitsMiddlewareTests()
{
_trackerMock = new Mock<IPayloadTracker>();
_nextMock = new Mock<RequestDelegate>();
_nextMock.Setup(n => n(It.IsAny<HttpContext>()))
.Callback(() => _nextCalled = true)
.Returns(Task.CompletedTask);
_defaultLimits = new PayloadLimits
{
MaxRequestBytesPerCall = 10 * 1024 * 1024, // 10MB
MaxRequestBytesPerConnection = 100 * 1024 * 1024, // 100MB
MaxAggregateInflightBytes = 1024 * 1024 * 1024 // 1GB
};
}
private PayloadLimitsMiddleware CreateMiddleware(PayloadLimits? limits = null)
{
return new PayloadLimitsMiddleware(
_nextMock.Object,
Options.Create(limits ?? _defaultLimits),
NullLogger<PayloadLimitsMiddleware>.Instance);
}
private static HttpContext CreateHttpContext(long? contentLength = null, string connectionId = "conn-1")
{
var context = new DefaultHttpContext();
context.Response.Body = new MemoryStream();
context.Request.Body = new MemoryStream();
context.Connection.Id = connectionId;
if (contentLength.HasValue)
{
context.Request.ContentLength = contentLength;
}
return context;
}
#region Within Limits Tests
[Fact]
public async Task Invoke_WithinLimits_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: 1000);
_trackerMock.Setup(t => t.TryReserve("conn-1", 1000))
.Returns(true);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
_nextCalled.Should().BeTrue();
}
[Fact]
public async Task Invoke_WithNoContentLength_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: null);
_trackerMock.Setup(t => t.TryReserve("conn-1", 0))
.Returns(true);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
_nextCalled.Should().BeTrue();
}
[Fact]
public async Task Invoke_WithZeroContentLength_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: 0);
_trackerMock.Setup(t => t.TryReserve("conn-1", 0))
.Returns(true);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
_nextCalled.Should().BeTrue();
}
#endregion
#region Per-Call Limit Tests
[Fact]
public async Task Invoke_ExceedsPerCallLimit_Returns413()
{
// Arrange
var limits = new PayloadLimits { MaxRequestBytesPerCall = 1000 };
var middleware = CreateMiddleware(limits);
var context = CreateHttpContext(contentLength: 2000);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status413PayloadTooLarge);
}
[Fact]
public async Task Invoke_ExceedsPerCallLimit_WritesErrorResponse()
{
// Arrange
var limits = new PayloadLimits { MaxRequestBytesPerCall = 1000 };
var middleware = CreateMiddleware(limits);
var context = CreateHttpContext(contentLength: 2000);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("Payload Too Large");
responseBody.Should().Contain("1000");
responseBody.Should().Contain("2000");
}
[Fact]
public async Task Invoke_ExactlyAtPerCallLimit_CallsNext()
{
// Arrange
var limits = new PayloadLimits { MaxRequestBytesPerCall = 1000 };
var middleware = CreateMiddleware(limits);
var context = CreateHttpContext(contentLength: 1000);
_trackerMock.Setup(t => t.TryReserve("conn-1", 1000))
.Returns(true);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
_nextCalled.Should().BeTrue();
}
#endregion
#region Aggregate Limit Tests
[Fact]
public async Task Invoke_ExceedsAggregateLimit_Returns503()
{
// Arrange
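// Reservation is rejected while the tracker reports global overload, so the
// middleware is expected to shed load with 503 rather than a per-connection 429.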
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: 1000);
_trackerMock.Setup(t => t.TryReserve("conn-1", 1000))
.Returns(false);
_trackerMock.Setup(t => t.IsOverloaded)
.Returns(true);
_trackerMock.Setup(t => t.CurrentInflightBytes)
.Returns(1024 * 1024 * 1024); // 1GB
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status503ServiceUnavailable);
}
[Fact]
public async Task Invoke_ExceedsAggregateLimit_WritesOverloadedResponse()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: 1000);
_trackerMock.Setup(t => t.TryReserve("conn-1", 1000))
.Returns(false);
_trackerMock.Setup(t => t.IsOverloaded)
.Returns(true);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("Overloaded");
}
#endregion
#region Per-Connection Limit Tests
[Fact]
public async Task Invoke_ExceedsPerConnectionLimit_Returns429()
{
// Arrange
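// Reservation is rejected but the tracker is not globally overloaded, so the
// rejection is attributed to the per-connection budget and should surface as 429.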
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: 1000);
_trackerMock.Setup(t => t.TryReserve("conn-1", 1000))
.Returns(false);
_trackerMock.Setup(t => t.IsOverloaded)
.Returns(false); // Not aggregate limit
_trackerMock.Setup(t => t.GetConnectionInflightBytes("conn-1"))
.Returns(100 * 1024 * 1024); // 100MB
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status429TooManyRequests);
}
[Fact]
public async Task Invoke_ExceedsPerConnectionLimit_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: 1000);
_trackerMock.Setup(t => t.TryReserve("conn-1", 1000))
.Returns(false);
_trackerMock.Setup(t => t.IsOverloaded)
.Returns(false);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("Too Many Requests");
}
#endregion
#region Release Tests
[Fact]
public async Task Invoke_AfterSuccess_ReleasesReservation()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: 1000);
_trackerMock.Setup(t => t.TryReserve("conn-1", 1000))
.Returns(true);
// Act
await middleware.Invoke(context, _trackerMock.Object);
// Assert
_trackerMock.Verify(t => t.Release("conn-1", It.IsAny<long>()), Times.Once);
}
[Fact]
public async Task Invoke_AfterNextThrows_StillReleasesReservation()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(contentLength: 1000);
_trackerMock.Setup(t => t.TryReserve("conn-1", 1000))
.Returns(true);
_nextMock.Setup(n => n(It.IsAny<HttpContext>()))
.ThrowsAsync(new InvalidOperationException("Test error"));
// Act
var act = async () => await middleware.Invoke(context, _trackerMock.Object);
// Assert
await act.Should().ThrowAsync<InvalidOperationException>();
_trackerMock.Verify(t => t.Release("conn-1", It.IsAny<long>()), Times.Once);
}
#endregion
#region Different Connections Tests
[Fact]
public async Task Invoke_DifferentConnections_TrackedSeparately()
{
// Arrange
var middleware = CreateMiddleware();
var context1 = CreateHttpContext(contentLength: 1000, connectionId: "conn-1");
var context2 = CreateHttpContext(contentLength: 2000, connectionId: "conn-2");
_trackerMock.Setup(t => t.TryReserve(It.IsAny<string>(), It.IsAny<long>()))
.Returns(true);
// Act
await middleware.Invoke(context1, _trackerMock.Object);
await middleware.Invoke(context2, _trackerMock.Object);
// Assert
_trackerMock.Verify(t => t.TryReserve("conn-1", 1000), Times.Once);
_trackerMock.Verify(t => t.TryReserve("conn-2", 2000), Times.Once);
}
#endregion
}

View File

@@ -0,0 +1,429 @@
using FluentAssertions;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Gateway.WebService.Middleware;
using StellaOps.Router.Common.Abstractions;
using StellaOps.Router.Common.Enums;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests;
/// <summary>
/// Unit tests for <see cref="RoutingDecisionMiddleware"/>.
/// </summary>
public sealed class RoutingDecisionMiddlewareTests
{
private readonly Mock<IRoutingPlugin> _routingPluginMock;
private readonly Mock<IGlobalRoutingState> _routingStateMock;
private readonly Mock<RequestDelegate> _nextMock;
private readonly GatewayNodeConfig _gatewayConfig;
private readonly RoutingOptions _routingOptions;
private bool _nextCalled;
public RoutingDecisionMiddlewareTests()
{
_routingPluginMock = new Mock<IRoutingPlugin>();
_routingStateMock = new Mock<IGlobalRoutingState>();
_nextMock = new Mock<RequestDelegate>();
_nextMock.Setup(n => n(It.IsAny<HttpContext>()))
.Callback(() => _nextCalled = true)
.Returns(Task.CompletedTask);
_gatewayConfig = new GatewayNodeConfig
{
Region = "us-east-1",
NodeId = "gw-01",
Environment = "test"
};
_routingOptions = new RoutingOptions
{
DefaultVersion = "1.0.0"
};
}
private RoutingDecisionMiddleware CreateMiddleware()
{
return new RoutingDecisionMiddleware(_nextMock.Object);
}
private HttpContext CreateHttpContext(EndpointDescriptor? endpoint = null)
{
var context = new DefaultHttpContext();
context.Request.Method = "GET";
context.Request.Path = "/api/test";
context.Response.Body = new MemoryStream();
if (endpoint is not null)
{
context.Items[RouterHttpContextKeys.EndpointDescriptor] = endpoint;
}
return context;
}
private static EndpointDescriptor CreateEndpoint(
string serviceName = "test-service",
string version = "1.0.0")
{
return new EndpointDescriptor
{
ServiceName = serviceName,
Version = version,
Method = "GET",
Path = "/api/test"
};
}
private static ConnectionState CreateConnection(
string connectionId = "conn-1",
InstanceHealthStatus status = InstanceHealthStatus.Healthy)
{
return new ConnectionState
{
ConnectionId = connectionId,
Instance = new InstanceDescriptor
{
InstanceId = $"inst-{connectionId}",
ServiceName = "test-service",
Version = "1.0.0",
Region = "us-east-1"
},
Status = status,
TransportType = TransportType.InMemory
};
}
private static RoutingDecision CreateDecision(
EndpointDescriptor? endpoint = null,
ConnectionState? connection = null)
{
return new RoutingDecision
{
Endpoint = endpoint ?? CreateEndpoint(),
Connection = connection ?? CreateConnection(),
TransportType = TransportType.InMemory,
EffectiveTimeout = TimeSpan.FromSeconds(30)
};
}
#region Missing Endpoint Tests
[Fact]
public async Task Invoke_WithNoEndpoint_Returns500()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(endpoint: null);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status500InternalServerError);
}
[Fact]
public async Task Invoke_WithNoEndpoint_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(endpoint: null);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("descriptor missing");
}
#endregion
#region Available Instance Tests
[Fact]
public async Task Invoke_WithAvailableInstance_SetsRoutingDecision()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var connection = CreateConnection();
var decision = CreateDecision(endpoint, connection);
var context = CreateHttpContext(endpoint: endpoint);
_routingStateMock.Setup(r => r.GetConnectionsFor(
endpoint.ServiceName, endpoint.Version, endpoint.Method, endpoint.Path))
.Returns([connection]);
_routingPluginMock.Setup(p => p.ChooseInstanceAsync(
It.IsAny<RoutingContext>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(decision);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
_nextCalled.Should().BeTrue();
context.Items[RouterHttpContextKeys.RoutingDecision].Should().Be(decision);
}
[Fact]
public async Task Invoke_WithAvailableInstance_CallsNext()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var decision = CreateDecision(endpoint);
var context = CreateHttpContext(endpoint: endpoint);
_routingStateMock.Setup(r => r.GetConnectionsFor(
It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns([CreateConnection()]);
_routingPluginMock.Setup(p => p.ChooseInstanceAsync(
It.IsAny<RoutingContext>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(decision);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
_nextCalled.Should().BeTrue();
}
#endregion
#region No Instances Tests
[Fact]
public async Task Invoke_WithNoInstances_Returns503()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var context = CreateHttpContext(endpoint: endpoint);
_routingStateMock.Setup(r => r.GetConnectionsFor(
It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns([]);
_routingPluginMock.Setup(p => p.ChooseInstanceAsync(
It.IsAny<RoutingContext>(), It.IsAny<CancellationToken>()))
.ReturnsAsync((RoutingDecision?)null);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status503ServiceUnavailable);
}
[Fact]
public async Task Invoke_WithNoInstances_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var context = CreateHttpContext(endpoint: endpoint);
_routingStateMock.Setup(r => r.GetConnectionsFor(
It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns([]);
_routingPluginMock.Setup(p => p.ChooseInstanceAsync(
It.IsAny<RoutingContext>(), It.IsAny<CancellationToken>()))
.ReturnsAsync((RoutingDecision?)null);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("No instances available");
responseBody.Should().Contain("test-service");
}
#endregion
#region Routing Context Tests
[Fact]
public async Task Invoke_PassesCorrectRoutingContext()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var decision = CreateDecision(endpoint);
var connection = CreateConnection();
var context = CreateHttpContext(endpoint: endpoint);
_routingStateMock.Setup(r => r.GetConnectionsFor(
endpoint.ServiceName, endpoint.Version, endpoint.Method, endpoint.Path))
.Returns([connection]);
RoutingContext? capturedContext = null;
_routingPluginMock.Setup(p => p.ChooseInstanceAsync(
It.IsAny<RoutingContext>(), It.IsAny<CancellationToken>()))
.Callback<RoutingContext, CancellationToken>((ctx, _) => capturedContext = ctx)
.ReturnsAsync(decision);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
capturedContext.Should().NotBeNull();
capturedContext!.Method.Should().Be("GET");
capturedContext.Path.Should().Be("/api/test");
capturedContext.GatewayRegion.Should().Be("us-east-1");
capturedContext.Endpoint.Should().Be(endpoint);
capturedContext.AvailableConnections.Should().ContainSingle();
}
[Fact]
public async Task Invoke_PassesRequestHeaders()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var decision = CreateDecision(endpoint);
var context = CreateHttpContext(endpoint: endpoint);
context.Request.Headers["X-Custom-Header"] = "CustomValue";
_routingStateMock.Setup(r => r.GetConnectionsFor(
It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns([CreateConnection()]);
RoutingContext? capturedContext = null;
_routingPluginMock.Setup(p => p.ChooseInstanceAsync(
It.IsAny<RoutingContext>(), It.IsAny<CancellationToken>()))
.Callback<RoutingContext, CancellationToken>((ctx, _) => capturedContext = ctx)
.ReturnsAsync(decision);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
capturedContext!.Headers.Should().ContainKey("X-Custom-Header");
capturedContext.Headers["X-Custom-Header"].Should().Be("CustomValue");
}
#endregion
#region Version Extraction Tests
[Fact]
public async Task Invoke_WithXApiVersionHeader_ExtractsVersion()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var decision = CreateDecision(endpoint);
var context = CreateHttpContext(endpoint: endpoint);
context.Request.Headers["X-Api-Version"] = "2.0.0";
_routingStateMock.Setup(r => r.GetConnectionsFor(
It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns([CreateConnection()]);
RoutingContext? capturedContext = null;
_routingPluginMock.Setup(p => p.ChooseInstanceAsync(
It.IsAny<RoutingContext>(), It.IsAny<CancellationToken>()))
.Callback<RoutingContext, CancellationToken>((ctx, _) => capturedContext = ctx)
.ReturnsAsync(decision);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
capturedContext!.RequestedVersion.Should().Be("2.0.0");
}
[Fact]
public async Task Invoke_WithNoVersionHeader_UsesDefault()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint();
var decision = CreateDecision(endpoint);
var context = CreateHttpContext(endpoint: endpoint);
_routingStateMock.Setup(r => r.GetConnectionsFor(
It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns([CreateConnection()]);
RoutingContext? capturedContext = null;
_routingPluginMock.Setup(p => p.ChooseInstanceAsync(
It.IsAny<RoutingContext>(), It.IsAny<CancellationToken>()))
.Callback<RoutingContext, CancellationToken>((ctx, _) => capturedContext = ctx)
.ReturnsAsync(decision);
// Act
await middleware.Invoke(
context,
_routingPluginMock.Object,
_routingStateMock.Object,
Options.Create(_gatewayConfig),
Options.Create(_routingOptions));
// Assert
capturedContext!.RequestedVersion.Should().Be("1.0.0"); // From _routingOptions
}
#endregion
}

View File

@@ -11,6 +11,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<PrivateAssets>all</PrivateAssets>

View File

@@ -0,0 +1,786 @@
using System.Text;
using FluentAssertions;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Gateway.WebService.Middleware;
using StellaOps.Router.Common.Abstractions;
using StellaOps.Router.Common.Enums;
using StellaOps.Router.Common.Frames;
using StellaOps.Router.Common.Models;
using Xunit;
namespace StellaOps.Gateway.WebService.Tests;
/// <summary>
/// Unit tests for <see cref="TransportDispatchMiddleware"/>.
/// </summary>
public sealed class TransportDispatchMiddlewareTests
{
private readonly Mock<ITransportClient> _transportClientMock;
private readonly Mock<IGlobalRoutingState> _routingStateMock;
private readonly Mock<RequestDelegate> _nextMock;
private bool _nextCalled;
public TransportDispatchMiddlewareTests()
{
_transportClientMock = new Mock<ITransportClient>();
_routingStateMock = new Mock<IGlobalRoutingState>();
_nextMock = new Mock<RequestDelegate>();
_nextMock.Setup(n => n(It.IsAny<HttpContext>()))
.Callback(() => _nextCalled = true)
.Returns(Task.CompletedTask);
}
private TransportDispatchMiddleware CreateMiddleware()
{
return new TransportDispatchMiddleware(
_nextMock.Object,
NullLogger<TransportDispatchMiddleware>.Instance);
}
private static HttpContext CreateHttpContext(
RoutingDecision? decision = null,
string method = "GET",
string path = "/api/test",
byte[]? body = null)
{
var context = new DefaultHttpContext();
context.Request.Method = method;
context.Request.Path = path;
context.Response.Body = new MemoryStream();
if (body is not null)
{
context.Request.Body = new MemoryStream(body);
context.Request.ContentLength = body.Length;
}
else
{
context.Request.Body = new MemoryStream();
}
if (decision is not null)
{
context.Items[RouterHttpContextKeys.RoutingDecision] = decision;
}
return context;
}
private static EndpointDescriptor CreateEndpoint(
string serviceName = "test-service",
string version = "1.0.0",
bool supportsStreaming = false)
{
return new EndpointDescriptor
{
ServiceName = serviceName,
Version = version,
Method = "GET",
Path = "/api/test",
SupportsStreaming = supportsStreaming
};
}
private static ConnectionState CreateConnection(
string connectionId = "conn-1",
InstanceHealthStatus status = InstanceHealthStatus.Healthy)
{
return new ConnectionState
{
ConnectionId = connectionId,
Instance = new InstanceDescriptor
{
InstanceId = $"inst-{connectionId}",
ServiceName = "test-service",
Version = "1.0.0",
Region = "us-east-1"
},
Status = status,
TransportType = TransportType.InMemory
};
}
private static RoutingDecision CreateDecision(
EndpointDescriptor? endpoint = null,
ConnectionState? connection = null,
TimeSpan? timeout = null)
{
return new RoutingDecision
{
Endpoint = endpoint ?? CreateEndpoint(),
Connection = connection ?? CreateConnection(),
TransportType = TransportType.InMemory,
EffectiveTimeout = timeout ?? TimeSpan.FromSeconds(30)
};
}
private static Frame CreateResponseFrame(
string requestId = "test-request",
int statusCode = 200,
Dictionary<string, string>? headers = null,
byte[]? payload = null)
{
var response = new ResponseFrame
{
RequestId = requestId,
StatusCode = statusCode,
Headers = headers ?? new Dictionary<string, string>(),
Payload = payload ?? []
};
return FrameConverter.ToFrame(response);
}
#region Missing Routing Decision Tests
[Fact]
public async Task Invoke_WithNoRoutingDecision_Returns500()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(decision: null);
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
_nextCalled.Should().BeFalse();
context.Response.StatusCode.Should().Be(StatusCodes.Status500InternalServerError);
}
[Fact]
public async Task Invoke_WithNoRoutingDecision_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var context = CreateHttpContext(decision: null);
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("Routing decision missing");
}
#endregion
#region Successful Request/Response Tests
[Fact]
public async Task Invoke_WithSuccessfulResponse_ForwardsStatusCode()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync((ConnectionState conn, Frame req, TimeSpan timeout, CancellationToken ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
return CreateResponseFrame(requestId: requestFrame!.RequestId, statusCode: 201);
});
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.StatusCode.Should().Be(201);
}
[Fact]
public async Task Invoke_WithResponsePayload_WritesToResponseBody()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
var responsePayload = Encoding.UTF8.GetBytes("{\"result\":\"success\"}");
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync((ConnectionState conn, Frame req, TimeSpan timeout, CancellationToken ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
return CreateResponseFrame(requestId: requestFrame!.RequestId, payload: responsePayload);
});
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Be("{\"result\":\"success\"}");
}
[Fact]
public async Task Invoke_WithResponseHeaders_ForwardsHeaders()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
var responseHeaders = new Dictionary<string, string>
{
["X-Custom-Header"] = "CustomValue",
["Content-Type"] = "application/json"
};
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync((ConnectionState conn, Frame req, TimeSpan timeout, CancellationToken ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
return CreateResponseFrame(requestId: requestFrame!.RequestId, headers: responseHeaders);
});
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.Headers.Should().ContainKey("X-Custom-Header");
context.Response.Headers["X-Custom-Header"].ToString().Should().Be("CustomValue");
context.Response.Headers["Content-Type"].ToString().Should().Be("application/json");
}
[Fact]
public async Task Invoke_WithTransferEncodingHeader_DoesNotForward()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
var responseHeaders = new Dictionary<string, string>
{
["Transfer-Encoding"] = "chunked",
["X-Custom-Header"] = "CustomValue"
};
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync((ConnectionState conn, Frame req, TimeSpan timeout, CancellationToken ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
return CreateResponseFrame(requestId: requestFrame!.RequestId, headers: responseHeaders);
});
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.Headers.Should().NotContainKey("Transfer-Encoding");
context.Response.Headers.Should().ContainKey("X-Custom-Header");
}
[Fact]
public async Task Invoke_WithRequestBody_SendsBodyInFrame()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var requestBody = Encoding.UTF8.GetBytes("{\"data\":\"test\"}");
var context = CreateHttpContext(decision: decision, body: requestBody);
byte[]? capturedPayload = null;
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.Callback<ConnectionState, Frame, TimeSpan, CancellationToken>((conn, req, timeout, ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
capturedPayload = requestFrame?.Payload.ToArray();
})
.ReturnsAsync((ConnectionState conn, Frame req, TimeSpan timeout, CancellationToken ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
return CreateResponseFrame(requestId: requestFrame!.RequestId);
});
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
capturedPayload.Should().BeEquivalentTo(requestBody);
}
[Fact]
public async Task Invoke_WithRequestHeaders_ForwardsHeadersInFrame()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
context.Request.Headers["X-Request-Id"] = "req-123";
context.Request.Headers["Accept"] = "application/json";
IReadOnlyDictionary<string, string>? capturedHeaders = null;
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.Callback<ConnectionState, Frame, TimeSpan, CancellationToken>((conn, req, timeout, ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
capturedHeaders = requestFrame?.Headers;
})
.ReturnsAsync((ConnectionState conn, Frame req, TimeSpan timeout, CancellationToken ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
return CreateResponseFrame(requestId: requestFrame!.RequestId);
});
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
capturedHeaders.Should().NotBeNull();
capturedHeaders.Should().ContainKey("X-Request-Id");
capturedHeaders!["X-Request-Id"].Should().Be("req-123");
}
#endregion
#region Timeout Tests
[Fact]
public async Task Invoke_WithTimeout_Returns504()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision(timeout: TimeSpan.FromMilliseconds(50));
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ThrowsAsync(new OperationCanceledException());
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.StatusCode.Should().Be(StatusCodes.Status504GatewayTimeout);
}
[Fact]
public async Task Invoke_WithTimeout_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision(timeout: TimeSpan.FromMilliseconds(50));
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ThrowsAsync(new OperationCanceledException());
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("Upstream timeout");
responseBody.Should().Contain("test-service");
}
[Fact]
public async Task Invoke_WithTimeout_SendsCancelFrame()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision(timeout: TimeSpan.FromMilliseconds(50));
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ThrowsAsync(new OperationCanceledException());
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
_transportClientMock.Verify(t => t.SendCancelAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Guid>(),
CancelReasons.Timeout), Times.Once);
}
#endregion
#region Upstream Error Tests
[Fact]
public async Task Invoke_WithUpstreamError_Returns502()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ThrowsAsync(new InvalidOperationException("Connection failed"));
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.StatusCode.Should().Be(StatusCodes.Status502BadGateway);
}
[Fact]
public async Task Invoke_WithUpstreamError_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ThrowsAsync(new InvalidOperationException("Connection failed"));
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("Upstream error");
responseBody.Should().Contain("Connection failed");
}
#endregion
#region Invalid Response Tests
[Fact]
public async Task Invoke_WithInvalidResponseFrame_Returns502()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
// Return a frame of the wrong type that cannot be converted to a ResponseFrame
var invalidFrame = new Frame
{
Type = FrameType.Heartbeat, // Wrong type
CorrelationId = "test",
Payload = Array.Empty<byte>()
};
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync(invalidFrame);
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.StatusCode.Should().Be(StatusCodes.Status502BadGateway);
}
[Fact]
public async Task Invoke_WithInvalidResponseFrame_WritesErrorResponse()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
var invalidFrame = new Frame
{
Type = FrameType.Cancel, // Wrong type
CorrelationId = "test",
Payload = Array.Empty<byte>()
};
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync(invalidFrame);
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.Body.Seek(0, SeekOrigin.Begin);
using var reader = new StreamReader(context.Response.Body);
var responseBody = await reader.ReadToEndAsync();
responseBody.Should().Contain("Invalid upstream response");
}
#endregion
#region Connection Ping Update Tests
[Fact]
public async Task Invoke_WithSuccessfulResponse_UpdatesConnectionPing()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync((ConnectionState conn, Frame req, TimeSpan timeout, CancellationToken ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
return CreateResponseFrame(requestId: requestFrame!.RequestId);
});
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
_routingStateMock.Verify(r => r.UpdateConnection(
"conn-1",
It.IsAny<Action<ConnectionState>>()), Times.Once);
}
#endregion
#region Streaming Tests
[Fact]
public async Task Invoke_WithStreamingEndpoint_UsesSendStreamingAsync()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint(supportsStreaming: true);
var decision = CreateDecision(endpoint: endpoint);
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendStreamingAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<Stream>(),
It.IsAny<Func<Stream, Task>>(),
It.IsAny<PayloadLimits>(),
It.IsAny<CancellationToken>()))
.Callback<ConnectionState, Frame, Stream, Func<Stream, Task>, PayloadLimits, CancellationToken>(
async (conn, req, requestBody, readResponse, limits, ct) =>
{
// Simulate streaming response
using var responseStream = new MemoryStream(Encoding.UTF8.GetBytes("streamed data"));
await readResponse(responseStream);
})
.Returns(Task.CompletedTask);
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
_transportClientMock.Verify(t => t.SendStreamingAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<Stream>(),
It.IsAny<Func<Stream, Task>>(),
It.IsAny<PayloadLimits>(),
It.IsAny<CancellationToken>()), Times.Once);
}
[Fact]
public async Task Invoke_StreamingWithTimeout_Returns504()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint(supportsStreaming: true);
var decision = CreateDecision(endpoint: endpoint, timeout: TimeSpan.FromMilliseconds(50));
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendStreamingAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<Stream>(),
It.IsAny<Func<Stream, Task>>(),
It.IsAny<PayloadLimits>(),
It.IsAny<CancellationToken>()))
.ThrowsAsync(new OperationCanceledException());
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.StatusCode.Should().Be(StatusCodes.Status504GatewayTimeout);
}
[Fact]
public async Task Invoke_StreamingWithUpstreamError_Returns502()
{
// Arrange
var middleware = CreateMiddleware();
var endpoint = CreateEndpoint(supportsStreaming: true);
var decision = CreateDecision(endpoint: endpoint);
var context = CreateHttpContext(decision: decision);
_transportClientMock.Setup(t => t.SendStreamingAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<Stream>(),
It.IsAny<Func<Stream, Task>>(),
It.IsAny<PayloadLimits>(),
It.IsAny<CancellationToken>()))
.ThrowsAsync(new InvalidOperationException("Streaming failed"));
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
context.Response.StatusCode.Should().Be(StatusCodes.Status502BadGateway);
}
#endregion
#region Query String Tests
[Fact]
public async Task Invoke_WithQueryString_IncludesInRequestPath()
{
// Arrange
var middleware = CreateMiddleware();
var decision = CreateDecision();
var context = CreateHttpContext(decision: decision, path: "/api/test");
context.Request.QueryString = new QueryString("?key=value&other=123");
string? capturedPath = null;
_transportClientMock.Setup(t => t.SendRequestAsync(
It.IsAny<ConnectionState>(),
It.IsAny<Frame>(),
It.IsAny<TimeSpan>(),
It.IsAny<CancellationToken>()))
.Callback<ConnectionState, Frame, TimeSpan, CancellationToken>((conn, req, timeout, ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
capturedPath = requestFrame?.Path;
})
.ReturnsAsync((ConnectionState conn, Frame req, TimeSpan timeout, CancellationToken ct) =>
{
var requestFrame = FrameConverter.ToRequestFrame(req);
return CreateResponseFrame(requestId: requestFrame!.RequestId);
});
// Act
await middleware.Invoke(
context,
_transportClientMock.Object,
_routingStateMock.Object);
// Assert
capturedPath.Should().Be("/api/test?key=value&other=123");
}
#endregion
}

View File

@@ -69,8 +69,10 @@ public sealed record AuditEntry(
{
/// <summary>
/// Creates a new audit entry with computed hash.
/// Uses the platform's compliance-aware crypto abstraction.
/// </summary>
public static AuditEntry Create(
CanonicalJsonHasher hasher,
string tenantId,
AuditEventType eventType,
string resourceType,
@@ -89,12 +91,14 @@ public sealed record AuditEntry(
long sequenceNumber = 0,
string? metadata = null)
{
ArgumentNullException.ThrowIfNull(hasher);
var entryId = Guid.NewGuid();
var occurredAt = DateTimeOffset.UtcNow;
// Compute canonical hash from immutable content
// Use the same property names and fields as VerifyIntegrity to keep the hash stable.
var contentHash = CanonicalJsonHasher.ComputeCanonicalSha256(new
var contentHash = hasher.ComputeCanonicalHash(new
{
EntryId = entryId,
TenantId = tenantId,
@@ -135,10 +139,13 @@ public sealed record AuditEntry(
/// <summary>
/// Verifies the integrity of this entry's content hash.
/// Uses the platform's compliance-aware crypto abstraction.
/// </summary>
public bool VerifyIntegrity()
public bool VerifyIntegrity(CanonicalJsonHasher hasher)
{
var computed = CanonicalJsonHasher.ComputeCanonicalSha256(new
ArgumentNullException.ThrowIfNull(hasher);
var computed = hasher.ComputeCanonicalHash(new
{
EntryId,
TenantId,
@@ -169,12 +176,6 @@ public sealed record AuditEntry(
return string.Equals(PreviousEntryHash, previousEntry.ContentHash, StringComparison.OrdinalIgnoreCase);
}
private static string ComputeSha256(string content)
{
var bytes = System.Text.Encoding.UTF8.GetBytes(content);
var hash = System.Security.Cryptography.SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}
/// <summary>
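A minimal sketch of how the injected hasher is now used to re-verify an entry, based on the Create and VerifyIntegrity signatures above and the tamper checks in the test changes later in this commit; the class and method names (AuditIntegrityExample, CheckIntegrity) are illustrative only, not part of the change.
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Hashing;
static class AuditIntegrityExample
{
    // Recomputes the canonical hash over the entry's immutable content and compares it
    // with the stored ContentHash; any mutation of a hashed field breaks the check.
    static bool CheckIntegrity(AuditEntry entry, CanonicalJsonHasher hasher)
    {
        var intact = entry.VerifyIntegrity(hasher);            // true for an untampered entry
        var tampered = entry with { Description = "edited" };  // records are immutable; 'with' copies
        var stillIntact = tampered.VerifyIntegrity(hasher);    // false: stored hash no longer matches
        return intact && !stillIntact;
    }
}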

View File

@@ -1,7 +1,7 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Hashing;
namespace StellaOps.Orchestrator.Core.Domain.Events;
@@ -178,13 +178,18 @@ public sealed record EventEnvelope(
}
}
/// <summary>Computes a digest of the envelope for signing.</summary>
public string ComputeDigest()
/// <summary>
/// Computes a digest of the envelope for signing.
/// Uses the platform's compliance-aware crypto abstraction.
/// </summary>
public string ComputeDigest(ICryptoHash cryptoHash)
{
ArgumentNullException.ThrowIfNull(cryptoHash);
var canonicalJson = CanonicalJsonHasher.ToCanonicalJson(new { envelope = this });
var bytes = Encoding.UTF8.GetBytes(canonicalJson);
var hash = SHA256.HashData(bytes);
return $"sha256:{Convert.ToHexStringLower(hash)}";
var hash = cryptoHash.ComputePrefixedHashForPurpose(bytes, HashPurpose.Content);
return hash;
}
private static readonly JsonSerializerOptions JsonOptions = new()
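For orientation, a small sketch of the new digest call, following the ComputeDigest(ICryptoHash) signature above and the expectations in EventPublishingTests later in this commit; DigestForSigning is an illustrative name, and the exact algorithm prefix depends on the active compliance profile.
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain.Events;
static class EnvelopeDigestExample
{
    // Returns an algorithm-prefixed digest such as "sha256:<64 lowercase hex chars>",
    // suitable for signing, computed via the compliance-aware ICryptoHash service.
    static string DigestForSigning(EventEnvelope envelope, ICryptoHash cryptoHash)
        => envelope.ComputeDigest(cryptoHash);
}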

View File

@@ -1,5 +1,5 @@
using System.Security.Cryptography;
using System.Text;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Hashing;
namespace StellaOps.Orchestrator.Core.Domain;
@@ -44,8 +44,10 @@ public sealed record PackRunLog(
{
/// <summary>
/// Creates a new log entry.
/// Uses the platform's compliance-aware crypto abstraction.
/// </summary>
public static PackRunLog Create(
ICryptoHash cryptoHash,
Guid packRunId,
string tenantId,
long sequence,
@@ -55,7 +57,9 @@ public sealed record PackRunLog(
string? data = null,
DateTimeOffset? timestamp = null)
{
var (digest, sizeBytes) = ComputeDigest(message, data, tenantId, packRunId, sequence, level, source);
ArgumentNullException.ThrowIfNull(cryptoHash);
var (digest, sizeBytes) = ComputeDigest(cryptoHash, message, data, tenantId, packRunId, sequence, level, source);
return new PackRunLog(
LogId: Guid.NewGuid(),
@@ -75,32 +79,35 @@ public sealed record PackRunLog(
/// Creates an info-level stdout log entry.
/// </summary>
public static PackRunLog Stdout(
ICryptoHash cryptoHash,
Guid packRunId,
string tenantId,
long sequence,
string message,
DateTimeOffset? timestamp = null)
{
return Create(packRunId, tenantId, sequence, LogLevel.Info, "stdout", message, null, timestamp);
return Create(cryptoHash, packRunId, tenantId, sequence, LogLevel.Info, "stdout", message, null, timestamp);
}
/// <summary>
/// Creates a warn-level stderr log entry.
/// </summary>
public static PackRunLog Stderr(
ICryptoHash cryptoHash,
Guid packRunId,
string tenantId,
long sequence,
string message,
DateTimeOffset? timestamp = null)
{
return Create(packRunId, tenantId, sequence, LogLevel.Warn, "stderr", message, null, timestamp);
return Create(cryptoHash, packRunId, tenantId, sequence, LogLevel.Warn, "stderr", message, null, timestamp);
}
/// <summary>
/// Creates a system-level log entry (lifecycle events).
/// </summary>
public static PackRunLog System(
ICryptoHash cryptoHash,
Guid packRunId,
string tenantId,
long sequence,
@@ -109,10 +116,11 @@ public sealed record PackRunLog(
string? data = null,
DateTimeOffset? timestamp = null)
{
return Create(packRunId, tenantId, sequence, level, "system", message, data, timestamp);
return Create(cryptoHash, packRunId, tenantId, sequence, level, "system", message, data, timestamp);
}
private static (string Digest, long SizeBytes) ComputeDigest(
ICryptoHash cryptoHash,
string message,
string? data,
string tenantId,
@@ -134,9 +142,9 @@ public sealed record PackRunLog(
var canonicalJson = CanonicalJsonHasher.ToCanonicalJson(payload);
var bytes = Encoding.UTF8.GetBytes(canonicalJson);
var hash = SHA256.HashData(bytes);
var hash = cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
return (Convert.ToHexString(hash).ToLowerInvariant(), bytes.LongLength);
return (hash, bytes.LongLength);
}
}
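A minimal usage sketch of the updated factories, mirroring the calls in PackRunLogTests below; CaptureStdout is an illustrative wrapper, and the ICryptoHash instance is assumed to come from DI (the tests use DefaultCryptoHash.CreateForTests()).
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain;
static class PackRunLogExample
{
    // Every factory now takes the crypto service first; the digest over the entry's
    // canonical JSON payload is computed with the active compliance profile.
    static PackRunLog CaptureStdout(ICryptoHash cryptoHash, Guid packRunId, string tenantId, long sequence, string message)
        => PackRunLog.Stdout(cryptoHash, packRunId, tenantId, sequence, message);
}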

View File

@@ -18,15 +18,17 @@ public sealed record ReplayInputsLock(
public static ReplayInputsLock Create(
ReplayManifest manifest,
CanonicalJsonHasher hasher,
string? notes = null,
DateTimeOffset? createdAt = null,
string schemaVersion = DefaultSchemaVersion)
{
ArgumentNullException.ThrowIfNull(manifest);
ArgumentNullException.ThrowIfNull(hasher);
return new ReplayInputsLock(
SchemaVersion: schemaVersion,
ManifestHash: manifest.ComputeHash(),
ManifestHash: manifest.ComputeHash(hasher),
CreatedAt: createdAt ?? DateTimeOffset.UtcNow,
Inputs: manifest.Inputs,
Notes: string.IsNullOrWhiteSpace(notes) ? null : notes);
@@ -34,6 +36,11 @@ public sealed record ReplayInputsLock(
/// <summary>
/// Canonical hash of the lock content.
/// Uses the platform's compliance-aware crypto abstraction.
/// </summary>
public string ComputeHash() => CanonicalJsonHasher.ComputeCanonicalSha256(this);
public string ComputeHash(CanonicalJsonHasher hasher)
{
ArgumentNullException.ThrowIfNull(hasher);
return hasher.ComputeCanonicalHash(this);
}
}
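A short sketch of the new Create/ComputeHash flow with an injected hasher, matching ReplayInputsLockTests further down; LockInputs is an illustrative name.
using StellaOps.Orchestrator.Core.Domain.Replay;
using StellaOps.Orchestrator.Core.Hashing;
static class ReplayLockExample
{
    // ManifestHash is pinned at creation time from the manifest's canonical hash,
    // so the same manifest and hasher always produce the same lock contents.
    static ReplayInputsLock LockInputs(ReplayManifest manifest, CanonicalJsonHasher hasher)
    {
        var inputsLock = ReplayInputsLock.Create(manifest, hasher);
        // inputsLock.ManifestHash == manifest.ComputeHash(hasher)
        return inputsLock;
    }
}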

View File

@@ -41,9 +41,14 @@ public sealed record ReplayManifest(
}
/// <summary>
/// Deterministic SHA-256 over canonical JSON representation of the manifest.
/// Deterministic hash over canonical JSON representation of the manifest.
/// Uses the platform's compliance-aware crypto abstraction.
/// </summary>
public string ComputeHash() => CanonicalJsonHasher.ComputeCanonicalSha256(this);
public string ComputeHash(CanonicalJsonHasher hasher)
{
ArgumentNullException.ThrowIfNull(hasher);
return hasher.ComputeCanonicalHash(this);
}
}
public sealed record ReplayInputs(

View File

@@ -1,17 +1,20 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
namespace StellaOps.Orchestrator.Core.Hashing;
/// <summary>
/// Produces deterministic, canonical JSON and hashes for orchestrator payloads (events, audit, manifests).
/// Keys are sorted lexicographically; arrays preserve order; nulls are retained; timestamps remain ISO 8601 with offsets.
/// Uses compliance-profile-aware hashing via <see cref="ICryptoHash"/>.
/// </summary>
public static class CanonicalJsonHasher
public sealed class CanonicalJsonHasher
{
private readonly ICryptoHash _cryptoHash;
private static readonly JsonSerializerOptions SerializerOptions = new()
{
DefaultIgnoreCondition = JsonIgnoreCondition.Never,
@@ -20,6 +23,15 @@ public static class CanonicalJsonHasher
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
};
/// <summary>
/// Creates a new CanonicalJsonHasher with the specified crypto hash service.
/// </summary>
/// <param name="cryptoHash">Crypto hash service for compliance-aware hashing.</param>
public CanonicalJsonHasher(ICryptoHash cryptoHash)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
/// <summary>
/// Serialize the value to canonical JSON (sorted object keys, stable formatting).
/// </summary>
@@ -32,14 +44,14 @@ public static class CanonicalJsonHasher
}
/// <summary>
/// Compute SHA-256 over canonical JSON (lowercase hex).
/// Compute hash over canonical JSON using the active compliance profile (lowercase hex).
/// Uses <see cref="HashPurpose.Content"/> for content hashing.
/// </summary>
public static string ComputeCanonicalSha256<T>(T value)
public string ComputeCanonicalHash<T>(T value)
{
var canonicalJson = ToCanonicalJson(value);
var bytes = Encoding.UTF8.GetBytes(canonicalJson);
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
}
private static JsonNode OrderNode(JsonNode node)
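A minimal sketch of the instance-based hasher, lifted from the property-ordering test later in this commit; DefaultCryptoHash.CreateForTests() is the test helper used there, and production callers are assumed to resolve ICryptoHash from DI instead.
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Hashing;
var cryptoHash = DefaultCryptoHash.CreateForTests();
var hasher = new CanonicalJsonHasher(cryptoHash);
// Canonicalization sorts object keys, so property order does not affect the digest.
var first = hasher.ComputeCanonicalHash(new { b = 1, a = 2 });
var second = hasher.ComputeCanonicalHash(new { a = 2, b = 1 });
// first == second: lowercase hex from the compliance profile's content algorithm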

View File

@@ -2,11 +2,21 @@ using StellaOps.Orchestrator.Core.Domain.Events;
namespace StellaOps.Orchestrator.Core.Hashing;
public static class EventEnvelopeHasher
/// <summary>
/// Computes compliance-aware hashes for event envelopes using the platform's crypto abstraction.
/// </summary>
public sealed class EventEnvelopeHasher
{
public static string Compute(EventEnvelope envelope)
private readonly CanonicalJsonHasher _hasher;
public EventEnvelopeHasher(CanonicalJsonHasher hasher)
{
_hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
}
public string Compute(EventEnvelope envelope)
{
ArgumentNullException.ThrowIfNull(envelope);
return CanonicalJsonHasher.ComputeCanonicalSha256(envelope);
return _hasher.ComputeCanonicalHash(envelope);
}
}
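A sketch of the now instance-based envelope hashing, following EventEnvelopeTests below; HashEnvelope is an illustrative helper name.
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain.Events;
using StellaOps.Orchestrator.Core.Hashing;
static class EnvelopeHashExample
{
    // Deterministic: hashing the same envelope twice yields the same digest
    // (64 lowercase hex characters under the default test profile).
    static string HashEnvelope(EventEnvelope envelope, ICryptoHash cryptoHash)
        => new EventEnvelopeHasher(new CanonicalJsonHasher(cryptoHash)).Compute(envelope);
}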

View File

@@ -17,4 +17,8 @@
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -2,6 +2,7 @@ using System.Text;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Hashing;
using StellaOps.Orchestrator.Infrastructure.Repositories;
namespace StellaOps.Orchestrator.Infrastructure.Postgres;
@@ -61,13 +62,16 @@ public sealed class PostgresAuditRepository : IAuditRepository
""";
private readonly OrchestratorDataSource _dataSource;
private readonly CanonicalJsonHasher _hasher;
private readonly ILogger<PostgresAuditRepository> _logger;
public PostgresAuditRepository(
OrchestratorDataSource dataSource,
CanonicalJsonHasher hasher,
ILogger<PostgresAuditRepository> logger)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -115,6 +119,7 @@ public sealed class PostgresAuditRepository : IAuditRepository
// Create the entry
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: tenantId,
eventType: eventType,
resourceType: resourceType,

View File

@@ -1,4 +1,6 @@
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Hashing;
namespace StellaOps.Orchestrator.Tests.AuditLedger;
@@ -7,6 +9,13 @@ namespace StellaOps.Orchestrator.Tests.AuditLedger;
/// </summary>
public sealed class AuditEntryTests
{
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
private readonly CanonicalJsonHasher _hasher;
public AuditEntryTests()
{
_hasher = new CanonicalJsonHasher(_cryptoHash);
}
[Fact]
public void Create_WithValidParameters_SetsAllProperties()
{
@@ -16,6 +25,7 @@ public sealed class AuditEntryTests
// Act
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: tenantId,
eventType: AuditEventType.JobCreated,
resourceType: "job",
@@ -62,6 +72,7 @@ public sealed class AuditEntryTests
{
// Arrange & Act
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.RunCreated,
resourceType: "run",
@@ -82,6 +93,7 @@ public sealed class AuditEntryTests
{
// Arrange
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.SourceCreated,
resourceType: "source",
@@ -92,7 +104,7 @@ public sealed class AuditEntryTests
sequenceNumber: 5);
// Act
var isValid = entry.VerifyIntegrity();
var isValid = entry.VerifyIntegrity(_hasher);
// Assert
Assert.True(isValid);
@@ -103,6 +115,7 @@ public sealed class AuditEntryTests
{
// Arrange
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.QuotaCreated,
resourceType: "quota",
@@ -116,7 +129,7 @@ public sealed class AuditEntryTests
var tamperedEntry = entry with { Description = "Tampered description" };
// Act
var isValid = tamperedEntry.VerifyIntegrity();
var isValid = tamperedEntry.VerifyIntegrity(_hasher);
// Assert
Assert.False(isValid);
@@ -127,6 +140,7 @@ public sealed class AuditEntryTests
{
// Arrange
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobScheduled,
resourceType: "job",
@@ -149,6 +163,7 @@ public sealed class AuditEntryTests
{
// Arrange
var first = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobCreated,
resourceType: "job",
@@ -160,6 +175,7 @@ public sealed class AuditEntryTests
sequenceNumber: 1);
var second = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobLeased,
resourceType: "job",
@@ -182,6 +198,7 @@ public sealed class AuditEntryTests
{
// Arrange
var first = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobCreated,
resourceType: "job",
@@ -193,6 +210,7 @@ public sealed class AuditEntryTests
sequenceNumber: 1);
var second = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobCompleted,
resourceType: "job",
@@ -225,6 +243,7 @@ public sealed class AuditEntryTests
{
// Act
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: eventType,
resourceType: resourceType,
@@ -237,7 +256,7 @@ public sealed class AuditEntryTests
// Assert
Assert.Equal(eventType, entry.EventType);
Assert.Equal(resourceType, entry.ResourceType);
Assert.True(entry.VerifyIntegrity());
Assert.True(entry.VerifyIntegrity(_hasher));
}
[Theory]
@@ -251,6 +270,7 @@ public sealed class AuditEntryTests
{
// Act
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobCreated,
resourceType: "job",
@@ -262,7 +282,7 @@ public sealed class AuditEntryTests
// Assert
Assert.Equal(actorType, entry.ActorType);
Assert.True(entry.VerifyIntegrity());
Assert.True(entry.VerifyIntegrity(_hasher));
}
[Fact]
@@ -274,6 +294,7 @@ public sealed class AuditEntryTests
// Act
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobLeased,
resourceType: "job",
@@ -295,6 +316,7 @@ public sealed class AuditEntryTests
{
// Act
var entry1 = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobCreated,
resourceType: "job",
@@ -305,6 +327,7 @@ public sealed class AuditEntryTests
sequenceNumber: 1);
var entry2 = AuditEntry.Create(
hasher: _hasher,
tenantId: "test-tenant",
eventType: AuditEventType.JobCreated,
resourceType: "job",

View File

@@ -1,4 +1,5 @@
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Hashing;
@@ -6,14 +7,22 @@ namespace StellaOps.Orchestrator.Tests;
public class CanonicalJsonHasherTests
{
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
private readonly CanonicalJsonHasher _hasher;
public CanonicalJsonHasherTests()
{
_hasher = new CanonicalJsonHasher(_cryptoHash);
}
[Fact]
public void ProducesStableHash_WhenObjectPropertyOrderDiffers()
{
var first = new { b = 1, a = 2 };
var second = new { a = 2, b = 1 };
var firstHash = CanonicalJsonHasher.ComputeCanonicalSha256(first);
var secondHash = CanonicalJsonHasher.ComputeCanonicalSha256(second);
var firstHash = _hasher.ComputeCanonicalHash(first);
var secondHash = _hasher.ComputeCanonicalHash(second);
Assert.Equal(firstHash, secondHash);
}
@@ -37,6 +46,7 @@ public class CanonicalJsonHasherTests
public void AuditEntry_UsesCanonicalHash()
{
var entry = AuditEntry.Create(
hasher: _hasher,
tenantId: "tenant-1",
eventType: AuditEventType.JobCreated,
resourceType: "job",
@@ -45,10 +55,10 @@ public class CanonicalJsonHasherTests
actorType: ActorType.User,
description: "created job");
Assert.True(entry.VerifyIntegrity());
Assert.True(entry.VerifyIntegrity(_hasher));
// Changing description should invalidate hash
var tampered = entry with { Description = "tampered" };
Assert.False(tampered.VerifyIntegrity());
Assert.False(tampered.VerifyIntegrity(_hasher));
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Immutable;
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core;
using StellaOps.Orchestrator.Core.Hashing;
@@ -7,6 +8,15 @@ namespace StellaOps.Orchestrator.Tests;
public class EventEnvelopeTests
{
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
private readonly CanonicalJsonHasher _hasher;
private readonly EventEnvelopeHasher _envelopeHasher;
public EventEnvelopeTests()
{
_hasher = new CanonicalJsonHasher(_cryptoHash);
_envelopeHasher = new EventEnvelopeHasher(_hasher);
}
[Fact]
public void ComputeIdempotencyKey_IsDeterministicAndLowercase()
{
@@ -83,8 +93,8 @@ public class EventEnvelopeTests
eventId: "evt-fixed",
idempotencyKey: "fixed-key");
var hash1 = EventEnvelopeHasher.Compute(envelope);
var hash2 = EventEnvelopeHasher.Compute(envelope);
var hash1 = _envelopeHasher.Compute(envelope);
var hash2 = _envelopeHasher.Compute(envelope);
Assert.Equal(hash1, hash2);
Assert.Equal(64, hash1.Length);

View File

@@ -1,5 +1,6 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain.Events;
using StellaOps.Orchestrator.Infrastructure.Events;
@@ -11,6 +12,7 @@ namespace StellaOps.Orchestrator.Tests.Events;
public class EventPublishingTests
{
private static readonly CancellationToken CT = CancellationToken.None;
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
#region EventEnvelope Tests
@@ -144,7 +146,7 @@ public class EventPublishingTests
tenantId: "tenant-1",
actor: actor);
var digest = envelope.ComputeDigest();
var digest = envelope.ComputeDigest(_cryptoHash);
Assert.StartsWith("sha256:", digest);
Assert.Equal(64 + 7, digest.Length); // "sha256:" + 64 hex chars

View File

@@ -1,3 +1,4 @@
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.WebService.Contracts;
@@ -5,6 +6,7 @@ namespace StellaOps.Orchestrator.Tests.PackRun;
public sealed class PackRunContractTests
{
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
[Fact]
public void PackRunResponse_FromDomain_MapsAllFields()
{
@@ -88,6 +90,7 @@ public sealed class PackRunContractTests
var now = DateTimeOffset.UtcNow;
var log = PackRunLog.Create(
cryptoHash: _cryptoHash,
packRunId: packRunId,
tenantId: "tenant-1",
sequence: 42,
@@ -121,6 +124,7 @@ public sealed class PackRunContractTests
public void LogEntryResponse_FromDomain_LevelIsLowercase(LogLevel level, string expectedLevelString)
{
var log = PackRunLog.Create(
cryptoHash: _cryptoHash,
packRunId: Guid.NewGuid(),
tenantId: "t1",
sequence: 0,

View File

@@ -1,3 +1,4 @@
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain;
namespace StellaOps.Orchestrator.Tests.PackRun;
@@ -6,6 +7,7 @@ public sealed class PackRunLogTests
{
private const string TestTenantId = "tenant-test";
private readonly Guid _packRunId = Guid.NewGuid();
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
[Fact]
public void Create_InitializesAllFields()
@@ -13,6 +15,7 @@ public sealed class PackRunLogTests
var now = DateTimeOffset.UtcNow;
var log = PackRunLog.Create(
cryptoHash: _cryptoHash,
packRunId: _packRunId,
tenantId: TestTenantId,
sequence: 5,
@@ -41,6 +44,7 @@ public sealed class PackRunLogTests
var beforeCreate = DateTimeOffset.UtcNow;
var log = PackRunLog.Create(
cryptoHash: _cryptoHash,
packRunId: _packRunId,
tenantId: TestTenantId,
sequence: 0,
@@ -59,7 +63,7 @@ public sealed class PackRunLogTests
{
var now = DateTimeOffset.UtcNow;
var log = PackRunLog.Stdout(_packRunId, TestTenantId, 10, "Hello stdout", now);
var log = PackRunLog.Stdout(_cryptoHash, _packRunId, TestTenantId, 10, "Hello stdout", now);
Assert.Equal(LogLevel.Info, log.Level);
Assert.Equal("stdout", log.Source);
@@ -73,7 +77,7 @@ public sealed class PackRunLogTests
{
var now = DateTimeOffset.UtcNow;
var log = PackRunLog.Stderr(_packRunId, TestTenantId, 20, "Warning message", now);
var log = PackRunLog.Stderr(_cryptoHash, _packRunId, TestTenantId, 20, "Warning message", now);
Assert.Equal(LogLevel.Warn, log.Level);
Assert.Equal("stderr", log.Source);
@@ -86,7 +90,7 @@ public sealed class PackRunLogTests
{
var now = DateTimeOffset.UtcNow;
var log = PackRunLog.System(_packRunId, TestTenantId, 30, LogLevel.Error, "System error", "{\"code\":500}", now);
var log = PackRunLog.System(_cryptoHash, _packRunId, TestTenantId, 30, LogLevel.Error, "System error", "{\"code\":500}", now);
Assert.Equal(LogLevel.Error, log.Level);
Assert.Equal("system", log.Source);
@@ -112,6 +116,7 @@ public sealed class PackRunLogBatchTests
{
private const string TestTenantId = "tenant-test";
private readonly Guid _packRunId = Guid.NewGuid();
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
[Fact]
public void FromLogs_EmptyList_ReturnsEmptyBatch()
@@ -130,9 +135,9 @@ public sealed class PackRunLogBatchTests
{
var logs = new List<PackRunLog>
{
PackRunLog.Create(_packRunId, TestTenantId, 5, LogLevel.Info, "src", "msg1"),
PackRunLog.Create(_packRunId, TestTenantId, 6, LogLevel.Info, "src", "msg2"),
PackRunLog.Create(_packRunId, TestTenantId, 7, LogLevel.Info, "src", "msg3")
PackRunLog.Create(_cryptoHash, _packRunId, TestTenantId, 5, LogLevel.Info, "src", "msg1"),
PackRunLog.Create(_cryptoHash, _packRunId, TestTenantId, 6, LogLevel.Info, "src", "msg2"),
PackRunLog.Create(_cryptoHash, _packRunId, TestTenantId, 7, LogLevel.Info, "src", "msg3")
};
var batch = PackRunLogBatch.FromLogs(_packRunId, TestTenantId, logs);
@@ -151,8 +156,8 @@ public sealed class PackRunLogBatchTests
StartSequence: 100,
Logs:
[
PackRunLog.Create(_packRunId, TestTenantId, 100, LogLevel.Info, "src", "msg1"),
PackRunLog.Create(_packRunId, TestTenantId, 101, LogLevel.Info, "src", "msg2")
PackRunLog.Create(_cryptoHash, _packRunId, TestTenantId, 100, LogLevel.Info, "src", "msg1"),
PackRunLog.Create(_cryptoHash, _packRunId, TestTenantId, 101, LogLevel.Info, "src", "msg2")
]);
Assert.Equal(102, batch.NextSequence);

View File

@@ -1,10 +1,20 @@
using System.Collections.Immutable;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain.Replay;
using StellaOps.Orchestrator.Core.Hashing;
namespace StellaOps.Orchestrator.Tests;
public class ReplayInputsLockTests
{
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
private readonly CanonicalJsonHasher _hasher;
public ReplayInputsLockTests()
{
_hasher = new CanonicalJsonHasher(_cryptoHash);
}
[Fact]
public void ReplayInputsLock_ComputesStableHash()
{
@@ -22,10 +32,10 @@ public class ReplayInputsLockTests
artifacts: null,
createdAt: new DateTimeOffset(2025, 01, 01, 0, 0, 0, TimeSpan.Zero));
var lock1 = ReplayInputsLock.Create(manifest, createdAt: new DateTimeOffset(2025, 01, 01, 0, 0, 5, TimeSpan.Zero));
var lock2 = ReplayInputsLock.Create(manifest, createdAt: new DateTimeOffset(2025, 01, 01, 0, 0, 5, TimeSpan.Zero));
var lock1 = ReplayInputsLock.Create(manifest, _hasher, createdAt: new DateTimeOffset(2025, 01, 01, 0, 0, 5, TimeSpan.Zero));
var lock2 = ReplayInputsLock.Create(manifest, _hasher, createdAt: new DateTimeOffset(2025, 01, 01, 0, 0, 5, TimeSpan.Zero));
Assert.Equal(lock1.ComputeHash(), lock2.ComputeHash());
Assert.Equal(lock1.ComputeHash(_hasher), lock2.ComputeHash(_hasher));
}
[Fact]
@@ -43,8 +53,8 @@ public class ReplayInputsLockTests
TimeSource: ReplayTimeSource.wall,
Env: ImmutableDictionary<string, string>.Empty));
var inputsLock = ReplayInputsLock.Create(manifest);
var inputsLock = ReplayInputsLock.Create(manifest, _hasher);
Assert.Equal(manifest.ComputeHash(), inputsLock.ManifestHash);
Assert.Equal(manifest.ComputeHash(_hasher), inputsLock.ManifestHash);
}
}

View File

@@ -1,10 +1,20 @@
using System.Collections.Immutable;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain.Replay;
using StellaOps.Orchestrator.Core.Hashing;
namespace StellaOps.Orchestrator.Tests;
public class ReplayManifestTests
{
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
private readonly CanonicalJsonHasher _hasher;
public ReplayManifestTests()
{
_hasher = new CanonicalJsonHasher(_cryptoHash);
}
[Fact]
public void ComputeHash_IsStableWithCanonicalOrdering()
{
@@ -31,8 +41,8 @@ public class ReplayManifestTests
artifacts: new[] { new ReplayArtifact("ledger.ndjson", "sha256:abc", "application/x-ndjson") },
createdAt: new DateTimeOffset(2025, 12, 1, 0, 0, 0, TimeSpan.Zero));
var hashA = manifestA.ComputeHash();
var hashB = manifestB.ComputeHash();
var hashA = manifestA.ComputeHash(_hasher);
var hashB = manifestB.ComputeHash(_hasher);
Assert.Equal(hashA, hashB);
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Immutable;
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core;
using StellaOps.Orchestrator.Core.Hashing;
@@ -7,6 +8,14 @@ namespace StellaOps.Orchestrator.Tests;
public class SchemaSmokeTests
{
private readonly ICryptoHash _cryptoHash = DefaultCryptoHash.CreateForTests();
private readonly CanonicalJsonHasher _hasher;
public SchemaSmokeTests()
{
_hasher = new CanonicalJsonHasher(_cryptoHash);
}
[Theory]
[InlineData("event-envelope.schema.json")]
[InlineData("audit-bundle.schema.json")]
@@ -47,8 +56,8 @@ public class SchemaSmokeTests
eventId: "evt-1",
idempotencyKey: "fixed");
var hash1 = CanonicalJsonHasher.ComputeCanonicalSha256(envelope);
var hash2 = CanonicalJsonHasher.ComputeCanonicalSha256(envelope);
var hash1 = _hasher.ComputeCanonicalHash(envelope);
var hash2 = _hasher.ComputeCanonicalHash(envelope);
Assert.Equal(hash1, hash2);
Assert.Equal(64, hash1.Length);

View File

@@ -53,27 +53,27 @@
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/>
<PackageReference Include="xunit.v3" Version="3.0.0"/>
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/>
<PackageReference Include="xunit.v3" Version="3.0.0"/>
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/>
</ItemGroup>
@@ -117,12 +117,14 @@
<ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/>
<ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/>
<ProjectReference Include="..\..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj"/>

View File

@@ -1,8 +1,8 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Cryptography;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Domain.Events;
using StellaOps.Orchestrator.Infrastructure;
@@ -102,6 +102,7 @@ public static class PackRunEndpoints
[FromServices] IPackRunRepository packRunRepository,
[FromServices] IQuotaRepository quotaRepository,
[FromServices] IEventPublisher eventPublisher,
[FromServices] ICryptoHash cryptoHash,
[FromServices] TimeProvider timeProvider,
CancellationToken cancellationToken)
{
@@ -127,7 +128,7 @@ public static class PackRunEndpoints
var tenantId = tenantResolver.Resolve(context);
var now = timeProvider.GetUtcNow();
var parameters = request.Parameters ?? "{}";
var parametersDigest = ComputeDigest(parameters);
var parametersDigest = ComputeDigest(cryptoHash, parameters);
var idempotencyKey = request.IdempotencyKey ?? $"pack-run:{request.PackId}:{parametersDigest}:{now:yyyyMMddHHmm}";
// Check for existing pack run with same idempotency key
@@ -429,6 +430,7 @@ public static class PackRunEndpoints
[FromServices] IPackRunRepository packRunRepository,
[FromServices] IPackRunLogRepository logRepository,
[FromServices] IEventPublisher eventPublisher,
[FromServices] ICryptoHash cryptoHash,
[FromServices] TimeProvider timeProvider,
CancellationToken cancellationToken)
{
@@ -471,7 +473,7 @@ public static class PackRunEndpoints
cancellationToken);
// Append system log entry
var log = PackRunLog.System(packRunId, tenantId, 0, PackLogLevel.Info, "Pack run started", null, now);
var log = PackRunLog.System(cryptoHash, packRunId, tenantId, 0, PackLogLevel.Info, "Pack run started", null, now);
await logRepository.AppendAsync(log, cancellationToken);
OrchestratorMetrics.PackRunStarted(tenantId, packRun.PackId);
@@ -499,6 +501,7 @@ public static class PackRunEndpoints
[FromServices] IQuotaRepository quotaRepository,
[FromServices] IArtifactRepository artifactRepository,
[FromServices] IEventPublisher eventPublisher,
[FromServices] ICryptoHash cryptoHash,
[FromServices] TimeProvider timeProvider,
CancellationToken cancellationToken)
{
@@ -587,7 +590,7 @@ public static class PackRunEndpoints
// Append system log entry
var (logCount, latestSeq) = await logRepository.GetLogStatsAsync(tenantId, packRunId, cancellationToken);
var completionLog = PackRunLog.System(
packRunId, tenantId, latestSeq + 1,
cryptoHash, packRunId, tenantId, latestSeq + 1,
request.Success ? PackLogLevel.Info : PackLogLevel.Error,
$"Pack run {(request.Success ? "succeeded" : "failed")} with exit code {request.ExitCode}",
null, now);
@@ -649,6 +652,7 @@ public static class PackRunEndpoints
[FromServices] IPackRunRepository packRunRepository,
[FromServices] IPackRunLogRepository logRepository,
[FromServices] IEventPublisher eventPublisher,
[FromServices] ICryptoHash cryptoHash,
[FromServices] TimeProvider timeProvider,
CancellationToken cancellationToken)
{
@@ -687,7 +691,7 @@ public static class PackRunEndpoints
: PackLogLevel.Info;
logs.Add(PackRunLog.Create(
packRunId, tenantId, seq, level,
cryptoHash, packRunId, tenantId, seq, level,
entry.Source,
entry.Message,
entry.Data,
@@ -773,6 +777,7 @@ public static class PackRunEndpoints
[FromServices] IPackRunLogRepository logRepository,
[FromServices] IQuotaRepository quotaRepository,
[FromServices] IEventPublisher eventPublisher,
[FromServices] ICryptoHash cryptoHash,
[FromServices] TimeProvider timeProvider,
CancellationToken cancellationToken)
{
@@ -811,7 +816,7 @@ public static class PackRunEndpoints
// Append system log entry
var (_, latestSeq) = await logRepository.GetLogStatsAsync(tenantId, packRunId, cancellationToken);
var cancelLog = PackRunLog.System(
packRunId, tenantId, latestSeq + 1,
cryptoHash, packRunId, tenantId, latestSeq + 1,
PackLogLevel.Warn, $"Pack run canceled: {request.Reason}", null, now);
await logRepository.AppendAsync(cancelLog, cancellationToken);
@@ -839,6 +844,7 @@ public static class PackRunEndpoints
[FromServices] TenantResolver tenantResolver,
[FromServices] IPackRunRepository packRunRepository,
[FromServices] IEventPublisher eventPublisher,
[FromServices] ICryptoHash cryptoHash,
[FromServices] TimeProvider timeProvider,
CancellationToken cancellationToken)
{
@@ -868,7 +874,7 @@ public static class PackRunEndpoints
var now = timeProvider.GetUtcNow();
var newPackRunId = Guid.NewGuid();
var parameters = request.Parameters ?? packRun.Parameters;
var parametersDigest = request.Parameters != null ? ComputeDigest(parameters) : packRun.ParametersDigest;
var parametersDigest = request.Parameters != null ? ComputeDigest(cryptoHash, parameters) : packRun.ParametersDigest;
var idempotencyKey = request.IdempotencyKey ?? $"retry:{packRunId}:{now:yyyyMMddHHmmss}";
var newPackRun = PackRun.Create(
@@ -1024,11 +1030,10 @@ public static class PackRunEndpoints
return quota;
}
private static string ComputeDigest(string content)
private static string ComputeDigest(ICryptoHash cryptoHash, string content)
{
var bytes = Encoding.UTF8.GetBytes(content);
var hash = SHA256.HashData(bytes);
return Convert.ToHexStringLower(hash);
return cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
}
private static JsonElement? ToPayload<T>(T value)

View File

@@ -3,6 +3,7 @@ using System.Text.Json;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Cryptography;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.RiskProfile.Export;
@@ -145,7 +146,8 @@ internal static class ProfileExportEndpoints
HttpContext context,
[FromBody] ImportProfilesRequest request,
RiskProfileConfigurationService profileService,
ProfileExportService exportService)
ProfileExportService exportService,
ICryptoHash cryptoHash)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
if (scopeResult is not null)
@@ -167,6 +169,7 @@ internal static class ProfileExportEndpoints
// Create an export service with save capability
var importExportService = new ProfileExportService(
cryptoHash: cryptoHash,
timeProvider: TimeProvider.System,
profileLookup: id => profileService.GetProfile(id),
lifecycleLookup: null,

View File

@@ -1,7 +1,7 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Telemetry;
using StellaOps.Policy.RiskProfile.Hashing;
@@ -18,6 +18,7 @@ public sealed class RiskScoringTriggerService
private readonly RiskProfileConfigurationService _profileService;
private readonly IRiskScoringJobStore _jobStore;
private readonly RiskProfileHasher _hasher;
private readonly ICryptoHash _cryptoHash;
private readonly ConcurrentDictionary<string, DateTimeOffset> _recentTriggers;
private readonly TimeSpan _deduplicationWindow;
@@ -25,13 +26,15 @@ public sealed class RiskScoringTriggerService
ILogger<RiskScoringTriggerService> logger,
TimeProvider timeProvider,
RiskProfileConfigurationService profileService,
IRiskScoringJobStore jobStore)
IRiskScoringJobStore jobStore,
ICryptoHash cryptoHash)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_profileService = profileService ?? throw new ArgumentNullException(nameof(profileService));
_jobStore = jobStore ?? throw new ArgumentNullException(nameof(jobStore));
_hasher = new RiskProfileHasher();
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_hasher = new RiskProfileHasher(cryptoHash);
_recentTriggers = new ConcurrentDictionary<string, DateTimeOffset>();
_deduplicationWindow = TimeSpan.FromMinutes(5);
}
@@ -256,10 +259,10 @@ public sealed class RiskScoringTriggerService
}
}
private static string GenerateJobId(string tenantId, string contextId, DateTimeOffset timestamp)
private string GenerateJobId(string tenantId, string contextId, DateTimeOffset timestamp)
{
var seed = $"{tenantId}|{contextId}|{timestamp:O}|{Guid.NewGuid()}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
return $"rsj-{Convert.ToHexStringLower(hash)[..16]}";
var hash = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(seed), HashPurpose.Content);
return $"rsj-{hash[..16]}";
}
}

View File

@@ -2,6 +2,7 @@ using System.Collections.Concurrent;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Cryptography;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.RiskProfile.Hashing;
using StellaOps.Policy.RiskProfile.Merge;
@@ -27,12 +28,14 @@ public sealed class RiskProfileConfigurationService
public RiskProfileConfigurationService(
ILogger<RiskProfileConfigurationService> logger,
IOptions<PolicyEngineOptions> options)
IOptions<PolicyEngineOptions> options,
ICryptoHash cryptoHash)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value.RiskProfile ?? throw new ArgumentNullException(nameof(options));
ArgumentNullException.ThrowIfNull(cryptoHash);
_mergeService = new RiskProfileMergeService();
_hasher = new RiskProfileHasher();
_hasher = new RiskProfileHasher(cryptoHash);
_validator = new RiskProfileValidator();
_profileCache = new ConcurrentDictionary<string, RiskProfileModel>(StringComparer.OrdinalIgnoreCase);
_resolvedCache = new ConcurrentDictionary<string, RiskProfileModel>(StringComparer.OrdinalIgnoreCase);

View File

@@ -1,8 +1,8 @@
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Telemetry;
using StellaOps.Policy.RiskProfile.Hashing;
@@ -19,6 +19,7 @@ public sealed class RiskSimulationService
private readonly TimeProvider _timeProvider;
private readonly RiskProfileConfigurationService _profileService;
private readonly RiskProfileHasher _hasher;
private readonly ICryptoHash _cryptoHash;
private static readonly double[] PercentileLevels = { 0.25, 0.50, 0.75, 0.90, 0.95, 0.99 };
private const int TopMoverCount = 10;
@@ -27,12 +28,14 @@ public sealed class RiskSimulationService
public RiskSimulationService(
ILogger<RiskSimulationService> logger,
TimeProvider timeProvider,
RiskProfileConfigurationService profileService)
RiskProfileConfigurationService profileService,
ICryptoHash cryptoHash)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_profileService = profileService ?? throw new ArgumentNullException(nameof(profileService));
_hasher = new RiskProfileHasher();
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_hasher = new RiskProfileHasher(cryptoHash);
}
/// <summary>
@@ -452,10 +455,10 @@ public sealed class RiskSimulationService
InformationalCount: scores.Count(s => s.Severity == RiskSeverity.Informational));
}
private static string GenerateSimulationId(RiskSimulationRequest request, string profileHash)
private string GenerateSimulationId(RiskSimulationRequest request, string profileHash)
{
var seed = $"{request.ProfileId}|{profileHash}|{request.Findings.Count}|{Guid.NewGuid()}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
return $"rsim-{Convert.ToHexStringLower(hash)[..16]}";
var hash = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(seed), HashPurpose.Content);
return $"rsim-{hash[..16]}";
}
}

View File

@@ -27,6 +27,7 @@
<ProjectReference Include="../__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
<ProjectReference Include="../StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
<ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />

View File

@@ -1,6 +1,7 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.Policy.RiskProfile.Hashing;
using StellaOps.Policy.RiskProfile.Lifecycle;
using StellaOps.Policy.RiskProfile.Models;
@@ -17,6 +18,7 @@ public sealed class ProfileExportService
private const string DefaultAlgorithm = "HMAC-SHA256";
private readonly TimeProvider _timeProvider;
private readonly ICryptoHash _cryptoHash;
private readonly RiskProfileHasher _hasher;
private readonly Func<string, RiskProfileModel?>? _profileLookup;
private readonly Func<string, RiskProfileVersionInfo?>? _lifecycleLookup;
@@ -30,14 +32,16 @@ public sealed class ProfileExportService
};
public ProfileExportService(
ICryptoHash cryptoHash,
TimeProvider? timeProvider = null,
Func<string, RiskProfileModel?>? profileLookup = null,
Func<string, RiskProfileVersionInfo?>? lifecycleLookup = null,
Action<RiskProfileModel>? profileSave = null,
Func<string, string?>? keyLookup = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
_hasher = new RiskProfileHasher();
_hasher = new RiskProfileHasher(cryptoHash);
_profileLookup = profileLookup;
_lifecycleLookup = lifecycleLookup;
_profileSave = profileSave;
@@ -331,15 +335,14 @@ public sealed class ProfileExportService
.ThenBy(p => p.Profile.Version)
.Select(p => p.ContentHash));
var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
return Convert.ToHexStringLower(hashBytes);
return _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(combined), HashPurpose.Content);
}
private static string GenerateBundleId(DateTimeOffset timestamp)
private string GenerateBundleId(DateTimeOffset timestamp)
{
var seed = $"{timestamp:O}|{Guid.NewGuid()}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
return $"rpb-{Convert.ToHexStringLower(hash)[..16]}";
var hash = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(seed), HashPurpose.Content);
return $"rpb-{hash[..16]}";
}
private static string GetSourceVersion()

View File

@@ -1,7 +1,7 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.RiskProfile.Hashing;
@@ -11,6 +11,8 @@ namespace StellaOps.Policy.RiskProfile.Hashing;
/// </summary>
public sealed class RiskProfileHasher
{
private readonly ICryptoHash _cryptoHash;
private static readonly JsonSerializerOptions CanonicalJsonOptions = new()
{
WriteIndented = false,
@@ -22,20 +24,24 @@ public sealed class RiskProfileHasher
},
};
public RiskProfileHasher(ICryptoHash cryptoHash)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
/// <summary>
/// Computes a deterministic SHA-256 hash of the risk profile.
/// Computes a deterministic hash of the risk profile using the compliance profile's content algorithm.
/// </summary>
/// <param name="profile">The profile to hash.</param>
/// <returns>Lowercase hex-encoded SHA-256 hash.</returns>
/// <returns>Lowercase hex-encoded hash.</returns>
public string ComputeHash(RiskProfileModel profile)
{
ArgumentNullException.ThrowIfNull(profile);
var canonical = CreateCanonicalForm(profile);
var json = JsonSerializer.Serialize(canonical, CanonicalJsonOptions);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexStringLower(hash);
return _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(json), HashPurpose.Content);
}
/// <summary>
@@ -43,16 +49,15 @@ public sealed class RiskProfileHasher
/// Useful for detecting semantic changes regardless of versioning.
/// </summary>
/// <param name="profile">The profile to hash.</param>
/// <returns>Lowercase hex-encoded SHA-256 hash.</returns>
/// <returns>Lowercase hex-encoded hash.</returns>
public string ComputeContentHash(RiskProfileModel profile)
{
ArgumentNullException.ThrowIfNull(profile);
var canonical = CreateCanonicalContentForm(profile);
var json = JsonSerializer.Serialize(canonical, CanonicalJsonOptions);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexStringLower(hash);
return _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(json), HashPurpose.Content);
}
/// <summary>
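A small sketch of the reworked profile hasher, assuming the RiskProfileModel type and the namespaces imported above; HashProfile is an illustrative name and the ICryptoHash instance is expected to come from DI.
using StellaOps.Cryptography;
using StellaOps.Policy.RiskProfile.Hashing;
using StellaOps.Policy.RiskProfile.Models;
static class RiskProfileHashExample
{
    // ComputeHash covers the full canonical form; ComputeContentHash, per the summary above,
    // ignores versioning metadata so semantic changes can be detected across versions.
    static (string Hash, string ContentHash) HashProfile(RiskProfileModel profile, ICryptoHash cryptoHash)
    {
        var hasher = new RiskProfileHasher(cryptoHash);
        return (hasher.ComputeHash(profile), hasher.ComputeContentHash(profile));
    }
}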

View File

@@ -1,6 +1,5 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Cryptography;
using StellaOps.Policy.RiskProfile.Hashing;
using StellaOps.Policy.RiskProfile.Models;
@@ -16,10 +15,11 @@ public sealed class RiskProfileLifecycleService
private readonly ConcurrentDictionary<string, List<RiskProfileVersionInfo>> _versions;
private readonly ConcurrentDictionary<string, List<RiskProfileLifecycleEvent>> _events;
public RiskProfileLifecycleService(TimeProvider? timeProvider = null)
public RiskProfileLifecycleService(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
ArgumentNullException.ThrowIfNull(cryptoHash);
_timeProvider = timeProvider ?? TimeProvider.System;
_hasher = new RiskProfileHasher();
_hasher = new RiskProfileHasher(cryptoHash);
_versions = new ConcurrentDictionary<string, List<RiskProfileVersionInfo>>(StringComparer.OrdinalIgnoreCase);
_events = new ConcurrentDictionary<string, List<RiskProfileLifecycleEvent>>(StringComparer.OrdinalIgnoreCase);
}

View File

@@ -11,6 +11,10 @@
<PackageReference Include="JsonSchema.Net" Version="5.3.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Schemas\risk-profile-schema@1.json" />
</ItemGroup>

View File

@@ -7,6 +7,7 @@ using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Cryptography;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Core.Replay;
using StellaOps.Scanner.Reachability;
@@ -25,29 +26,33 @@ internal sealed class RecordModeService : IRecordModeService
{
private readonly RecordModeAssembler _assembler;
private readonly ReachabilityReplayWriter _reachability;
private readonly ICryptoHash _cryptoHash;
private readonly IArtifactObjectStore? _objectStore;
private readonly ScannerStorageOptions? _storageOptions;
private readonly TimeProvider _timeProvider;
private readonly ILogger<RecordModeService>? _logger;
public RecordModeService(
ICryptoHash cryptoHash,
IArtifactObjectStore objectStore,
IOptions<ScannerStorageOptions> storageOptions,
TimeProvider timeProvider,
ILogger<RecordModeService> logger)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_objectStore = objectStore ?? throw new ArgumentNullException(nameof(objectStore));
_storageOptions = (storageOptions ?? throw new ArgumentNullException(nameof(storageOptions))).Value;
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_assembler = new RecordModeAssembler(timeProvider);
_assembler = new RecordModeAssembler(cryptoHash, timeProvider);
_reachability = new ReachabilityReplayWriter();
}
// Legacy/testing constructor for unit tests that do not require storage.
public RecordModeService(TimeProvider? timeProvider = null)
public RecordModeService(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
_assembler = new RecordModeAssembler(timeProvider);
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_assembler = new RecordModeAssembler(cryptoHash, timeProvider);
_reachability = new ReachabilityReplayWriter();
_timeProvider = timeProvider ?? TimeProvider.System;
}
@@ -241,7 +246,7 @@ internal sealed class RecordModeService : IRecordModeService
CancellationToken cancellationToken)
{
using var buffer = new MemoryStream();
var result = await ReplayBundleWriter.WriteTarZstAsync(entries, buffer, casPrefix: casPrefix, cancellationToken: cancellationToken).ConfigureAwait(false);
var result = await ReplayBundleWriter.WriteTarZstAsync(_cryptoHash, entries, buffer, casPrefix: casPrefix, cancellationToken: cancellationToken).ConfigureAwait(false);
buffer.Position = 0;

View File
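Both RecordModeService constructors now take the ICryptoHash up front, so the assembler and ReplayBundleWriter.WriteTarZstAsync hash with the same profile. A rough wiring sketch, assuming the remaining dependencies are already available from the host (variable names are illustrative):

    // Storage-backed path (normally composed by the DI container):
    var service = new RecordModeService(cryptoHash, objectStore, Options.Create(storageOptions), TimeProvider.System, logger);

    // Storage-less path used by unit tests:
    var testService = new RecordModeService(cryptoHash, TimeProvider.System);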

@@ -3,6 +3,7 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.ObjectStore;
@@ -16,12 +17,18 @@ namespace StellaOps.Scanner.Worker.Processing.Replay;
internal sealed class ReplayBundleFetcher
{
private readonly IArtifactObjectStore _objectStore;
private readonly ICryptoHash _cryptoHash;
private readonly ScannerStorageOptions _storageOptions;
private readonly ILogger<ReplayBundleFetcher> _logger;
public ReplayBundleFetcher(IArtifactObjectStore objectStore, ScannerStorageOptions storageOptions, ILogger<ReplayBundleFetcher> logger)
public ReplayBundleFetcher(
IArtifactObjectStore objectStore,
ICryptoHash cryptoHash,
ScannerStorageOptions storageOptions,
ILogger<ReplayBundleFetcher> logger)
{
_objectStore = objectStore ?? throw new ArgumentNullException(nameof(objectStore));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_storageOptions = storageOptions ?? throw new ArgumentNullException(nameof(storageOptions));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -55,7 +62,7 @@ internal sealed class ReplayBundleFetcher
// Verify hash
await using (var file = File.OpenRead(tempPath))
{
var actualHex = DeterministicHash.Sha256Hex(file);
var actualHex = await DeterministicHash.Sha256HexAsync(_cryptoHash, file, cancellationToken).ConfigureAwait(false);
var expected = NormalizeHash(metadata.ManifestHash);
if (!string.Equals(actualHex, expected, StringComparison.OrdinalIgnoreCase))
{

View File
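Direct constructions of ReplayBundleFetcher now need the hash service as well; downloaded bundles are re-hashed through it and compared against metadata.ManifestHash before use. A wiring sketch with illustrative variable names:

    // Illustrative; in the worker these four dependencies come from the DI container.
    var fetcher = new ReplayBundleFetcher(objectStore, cryptoHash, storageOptions, logger);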

@@ -15,7 +15,9 @@
<PackageReference Include="OpenTelemetry.Instrumentation.Process" Version="1.12.0-beta.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
<ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" />

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Cryptography;
using StellaOps.Replay.Core;
namespace StellaOps.Scanner.Core.Replay;
@@ -10,10 +11,12 @@ namespace StellaOps.Scanner.Core.Replay;
/// </summary>
public sealed class RecordModeAssembler
{
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
public RecordModeAssembler(TimeProvider? timeProvider = null)
public RecordModeAssembler(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
}
@@ -31,7 +34,7 @@ public sealed class RecordModeAssembler
ArgumentException.ThrowIfNullOrWhiteSpace(findingsDigest);
var now = _timeProvider.GetUtcNow().UtcDateTime;
var manifestHash = "sha256:" + manifest.ComputeCanonicalSha256();
var manifestHash = "sha256:" + manifest.ComputeCanonicalSha256(_cryptoHash);
return new ReplayRunRecord
{

View File

@@ -14,6 +14,7 @@
<ItemGroup>
<ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -8,8 +8,15 @@ namespace StellaOps.Scanner.Reachability;
/// Builds canonical CodeIDs used by richgraph-v1 to anchor symbols when names are missing.
/// </summary>
/// <remarks>
/// Format: <c>code:&lt;lang&gt;:&lt;base64url-sha256&gt;</c> where the hash is computed over a
/// <para>
/// Format: <c>code:{lang}:{base64url-sha256}</c> where the hash is computed over a
/// canonical tuple that is stable across machines and paths.
/// </para>
/// <para>
/// <strong>INTEROP NOTE:</strong> This static class uses SHA-256 for maximum external tool
/// compatibility. For compliance-profile-aware code IDs that respect GOST/SM3/FIPS profiles,
/// use <see cref="CodeIdBuilder"/> with an injected <see cref="StellaOps.Cryptography.ICryptoHash"/>.
/// </para>
/// </remarks>
public static class CodeId
{

View File

@@ -0,0 +1,131 @@
using System;
using System.Text;
using StellaOps.Cryptography;
namespace StellaOps.Scanner.Reachability;
/// <summary>
/// Builds canonical CodeIDs with compliance-profile-aware hashing.
/// Uses <see cref="HashPurpose.Symbol"/> which resolves to:
/// - SHA-256 for "world" and "fips" profiles
/// - GOST3411-2012-256 for "gost" profile
/// - SM3 for "sm" profile
/// </summary>
/// <remarks>
/// Format: <c>code:{lang}:{base64url-hash}</c> where the hash is computed over a
/// canonical tuple that is stable across machines and paths.
/// </remarks>
public sealed class CodeIdBuilder
{
private readonly ICryptoHash _cryptoHash;
/// <summary>
/// Creates a new CodeIdBuilder with the specified crypto hash service.
/// </summary>
/// <param name="cryptoHash">Crypto hash service for compliance-aware hashing.</param>
public CodeIdBuilder(ICryptoHash cryptoHash)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
/// <summary>
/// Creates a binary code-id from binary components.
/// </summary>
public string ForBinary(string buildId, string section, string? relativePath)
{
var tuple = $"{Norm(buildId)}\0{Norm(section)}\0{Norm(relativePath)}";
return Build("binary", tuple);
}
/// <summary>
/// Creates a .NET code-id from assembly components.
/// </summary>
public string ForDotNet(string assemblyName, string moduleName, string? mvid)
{
var tuple = $"{Norm(assemblyName)}\0{Norm(moduleName)}\0{Norm(mvid)}";
return Build("dotnet", tuple);
}
/// <summary>
/// Creates a binary code-id using canonical address + length tuple.
/// </summary>
public string ForBinarySegment(string format, string fileHash, string address, long? lengthBytes = null, string? section = null, string? codeBlockHash = null)
{
var tuple = $"{Norm(format)}\0{Norm(fileHash)}\0{NormalizeAddress(address)}\0{NormalizeLength(lengthBytes)}\0{Norm(section)}\0{Norm(codeBlockHash)}";
return Build("binary", tuple);
}
/// <summary>
/// Creates a Node code-id from package components.
/// </summary>
public string ForNode(string packageName, string entryPath)
{
var tuple = $"{Norm(packageName)}\0{Norm(entryPath)}";
return Build("node", tuple);
}
/// <summary>
/// Creates a code-id from an existing symbol ID.
/// </summary>
public string FromSymbolId(string symbolId)
{
ArgumentException.ThrowIfNullOrWhiteSpace(symbolId);
return Build("sym", symbolId.Trim());
}
private string Build(string lang, string tuple)
{
var bytes = Encoding.UTF8.GetBytes(tuple);
var hash = _cryptoHash.ComputeHashForPurpose(bytes, HashPurpose.Symbol);
var base64 = Convert.ToBase64String(hash)
.TrimEnd('=')
.Replace('+', '-')
.Replace('/', '_');
return $"code:{lang}:{base64}";
}
private static string NormalizeAddress(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return "0x0";
}
var addrText = value.Trim();
var isHex = addrText.StartsWith("0x", StringComparison.OrdinalIgnoreCase);
if (isHex)
{
addrText = addrText[2..];
}
if (long.TryParse(addrText, isHex ? System.Globalization.NumberStyles.HexNumber : System.Globalization.NumberStyles.Integer, System.Globalization.CultureInfo.InvariantCulture, out var addrValue))
{
if (addrValue < 0)
{
addrValue = 0;
}
return $"0x{addrValue:x}";
}
addrText = addrText.TrimStart('0');
if (addrText.Length == 0)
{
addrText = "0";
}
return $"0x{addrText.ToLowerInvariant()}";
}
private static string NormalizeLength(long? value)
{
if (value is null or <= 0)
{
return "unknown";
}
return value.Value.ToString("D", System.Globalization.CultureInfo.InvariantCulture);
}
private static string Norm(string? value) => (value ?? string.Empty).Trim();
}

View File
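A usage sketch for the new builder with made-up inputs; only the method shapes come from the class above. The static CodeId class remains available when a fixed SHA-256 form is required for interop with external tools:

    // Hypothetical inputs for illustration.
    var codeIds = new CodeIdBuilder(cryptoHash);

    var dotnetId = codeIds.ForDotNet("Contoso.Api", "Contoso.Api.dll", "0f8fad5b-d9cb-469f-a165-70867728950e");
    var binaryId = codeIds.ForBinarySegment(
        format: "elf",
        fileHash: "sha256:feedface",          // placeholder digest
        address: "0x401000",
        lengthBytes: 128,
        section: ".text");

    // Both results have the shape "code:{lang}:{base64url-hash}"; the hash algorithm
    // follows HashPurpose.Symbol for the active compliance profile.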

@@ -1,20 +1,24 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Cryptography;
namespace StellaOps.Scanner.Reachability;
/// <summary>
/// Writes richgraph-v1 documents to disk with canonical ordering and BLAKE3 hash.
/// Writes richgraph-v1 documents to disk with canonical ordering and compliance-profile-aware hashing.
/// Uses <see cref="HashPurpose.Graph"/> for content addressing, which resolves to:
/// - BLAKE3-256 for "world" profile
/// - SHA-256 for "fips" profile
/// - GOST3411-2012-256 for "gost" profile
/// - SM3 for "sm" profile
/// </summary>
public sealed class RichGraphWriter
{
private readonly ICryptoHash _cryptoHash;
private static readonly JsonWriterOptions JsonOptions = new()
{
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
@@ -22,6 +26,15 @@ public sealed class RichGraphWriter
SkipValidation = false
};
/// <summary>
/// Creates a new RichGraphWriter with the specified crypto hash service.
/// </summary>
/// <param name="cryptoHash">Crypto hash service for compliance-aware hashing.</param>
public RichGraphWriter(ICryptoHash cryptoHash)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
public async Task<RichGraphWriteResult> WriteAsync(
RichGraph graph,
string outputRoot,
@@ -46,7 +59,7 @@ public sealed class RichGraphWriter
}
var bytes = await File.ReadAllBytesAsync(graphPath, cancellationToken).ConfigureAwait(false);
var graphHash = ComputeSha256(bytes);
var graphHash = _cryptoHash.ComputePrefixedHashForPurpose(bytes, HashPurpose.Graph);
var metaPath = Path.Combine(root, "meta.json");
await using (var stream = File.Create(metaPath))
@@ -169,12 +182,6 @@ public sealed class RichGraphWriter
writer.WriteEndObject();
}
private static string ComputeSha256(IReadOnlyList<byte> bytes)
{
using var sha = SHA256.Create();
var hash = sha.ComputeHash(bytes.ToArray());
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}
}
public sealed record RichGraphWriteResult(

View File
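A minimal construction sketch; the full WriteAsync parameter list is only partially visible in this hunk, so it is not reproduced here:

    // Illustrative; cryptoHash is the host's ICryptoHash, and the graph/output root come from the scan pipeline.
    var writer = new RichGraphWriter(cryptoHash);
    // WriteAsync persists the graph document plus meta.json; the graph hash it records is now
    // produced via ComputePrefixedHashForPurpose(bytes, HashPurpose.Graph), so the prefix follows
    // the active profile (e.g. BLAKE3 under "world") instead of always being "sha256:".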

@@ -9,5 +9,6 @@
<ProjectReference Include="..\StellaOps.Scanner.Surface.Env\StellaOps.Scanner.Surface.Env.csproj" />
<ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Replay.Core\StellaOps.Replay.Core.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -10,8 +10,15 @@ namespace StellaOps.Scanner.Reachability;
/// to remain reproducible and cacheable across hosts.
/// </summary>
/// <remarks>
/// <para>
/// Format: <c>sym:{lang}:{stable-fragment}</c>
/// where stable-fragment is the base64url (no padding) encoding of the SHA-256 of the canonical tuple per language.
/// </para>
/// <para>
/// <strong>INTEROP NOTE:</strong> This static class uses SHA-256 for maximum external tool
/// compatibility. For compliance-profile-aware symbol IDs that respect GOST/SM3/FIPS profiles,
/// use <see cref="SymbolIdBuilder"/> with an injected <see cref="StellaOps.Cryptography.ICryptoHash"/>.
/// </para>
/// </remarks>
public static class SymbolId
{

View File

@@ -0,0 +1,209 @@
using System;
using System.Text;
using StellaOps.Cryptography;
namespace StellaOps.Scanner.Reachability;
/// <summary>
/// Builds canonical SymbolIDs with compliance-profile-aware hashing.
/// Uses <see cref="HashPurpose.Symbol"/> which resolves to:
/// - SHA-256 for "world" and "fips" profiles
/// - GOST3411-2012-256 for "gost" profile
/// - SM3 for "sm" profile
/// </summary>
/// <remarks>
/// Format: <c>sym:{lang}:{stable-fragment}</c>
/// where stable-fragment is base64url-no-pad of the profile-appropriate hash of the canonical tuple.
/// </remarks>
public sealed class SymbolIdBuilder
{
private readonly ICryptoHash _cryptoHash;
/// <summary>
/// Creates a new SymbolIdBuilder with the specified crypto hash service.
/// </summary>
/// <param name="cryptoHash">Crypto hash service for compliance-aware hashing.</param>
public SymbolIdBuilder(ICryptoHash cryptoHash)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
/// <summary>
/// Creates a Java symbol ID from method signature components.
/// </summary>
public string ForJava(string package, string className, string method, string descriptor)
{
var tuple = $"{Lower(package)}\0{Lower(className)}\0{Lower(method)}\0{Lower(descriptor)}";
return Build(SymbolId.Lang.Java, tuple);
}
/// <summary>
/// Creates a .NET symbol ID from member signature components.
/// </summary>
public string ForDotNet(string assemblyName, string ns, string typeName, string memberSignature)
{
var tuple = $"{Norm(assemblyName)}\0{Norm(ns)}\0{Norm(typeName)}\0{Norm(memberSignature)}";
return Build(SymbolId.Lang.DotNet, tuple);
}
/// <summary>
/// Creates a Node/Deno symbol ID from module export components.
/// </summary>
public string ForNode(string pkgNameOrPath, string exportPath, string kind)
{
var tuple = $"{Norm(pkgNameOrPath)}\0{Norm(exportPath)}\0{Norm(kind)}";
return Build(SymbolId.Lang.Node, tuple);
}
/// <summary>
/// Creates a Deno symbol ID from module export components.
/// </summary>
public string ForDeno(string pkgNameOrPath, string exportPath, string kind)
{
var tuple = $"{Norm(pkgNameOrPath)}\0{Norm(exportPath)}\0{Norm(kind)}";
return Build(SymbolId.Lang.Deno, tuple);
}
/// <summary>
/// Creates a Go symbol ID from function/method components.
/// </summary>
public string ForGo(string modulePath, string packagePath, string receiver, string func)
{
var tuple = $"{Norm(modulePath)}\0{Norm(packagePath)}\0{Norm(receiver)}\0{Norm(func)}";
return Build(SymbolId.Lang.Go, tuple);
}
/// <summary>
/// Creates a Rust symbol ID from item components.
/// </summary>
public string ForRust(string crateName, string modulePath, string itemName, string? mangled = null)
{
var tuple = $"{Norm(crateName)}\0{Norm(modulePath)}\0{Norm(itemName)}\0{Norm(mangled)}";
return Build(SymbolId.Lang.Rust, tuple);
}
/// <summary>
/// Creates a Swift symbol ID from member components.
/// </summary>
public string ForSwift(string module, string typeName, string member, string? mangled = null)
{
var tuple = $"{Norm(module)}\0{Norm(typeName)}\0{Norm(member)}\0{Norm(mangled)}";
return Build(SymbolId.Lang.Swift, tuple);
}
/// <summary>
/// Creates a shell symbol ID from script/function components.
/// </summary>
public string ForShell(string scriptRelPath, string functionOrCmd)
{
var tuple = $"{Norm(scriptRelPath)}\0{Norm(functionOrCmd)}";
return Build(SymbolId.Lang.Shell, tuple);
}
/// <summary>
/// Creates a binary symbol ID from ELF/PE/Mach-O components.
/// </summary>
public string ForBinary(string buildId, string section, string symbolName)
=> ForBinaryAddressed(buildId, section, string.Empty, symbolName, "static", null);
/// <summary>
/// Creates a binary symbol ID that includes file hash, section, address, and linkage.
/// </summary>
public string ForBinaryAddressed(string fileHash, string section, string address, string symbolName, string linkage, string? codeBlockHash = null)
{
var tuple = $"{Norm(fileHash)}\0{Norm(section)}\0{NormalizeAddress(address)}\0{Norm(symbolName)}\0{Norm(linkage)}\0{Norm(codeBlockHash)}";
return Build(SymbolId.Lang.Binary, tuple);
}
/// <summary>
/// Creates a Python symbol ID from module/function components.
/// </summary>
public string ForPython(string packageOrPath, string modulePath, string qualifiedName)
{
var tuple = $"{Norm(packageOrPath)}\0{Norm(modulePath)}\0{Norm(qualifiedName)}";
return Build(SymbolId.Lang.Python, tuple);
}
/// <summary>
/// Creates a Ruby symbol ID from module/method components.
/// </summary>
public string ForRuby(string gemOrPath, string modulePath, string methodName)
{
var tuple = $"{Norm(gemOrPath)}\0{Norm(modulePath)}\0{Norm(methodName)}";
return Build(SymbolId.Lang.Ruby, tuple);
}
/// <summary>
/// Creates a PHP symbol ID from namespace/function components.
/// </summary>
public string ForPhp(string composerPackage, string ns, string qualifiedName)
{
var tuple = $"{Norm(composerPackage)}\0{Norm(ns)}\0{Norm(qualifiedName)}";
return Build(SymbolId.Lang.Php, tuple);
}
/// <summary>
/// Creates a symbol ID from a pre-computed canonical tuple and language.
/// </summary>
public string FromTuple(string lang, string canonicalTuple)
{
ArgumentException.ThrowIfNullOrWhiteSpace(lang);
return Build(lang, canonicalTuple);
}
private string Build(string lang, string tuple)
{
var hash = ComputeFragment(tuple);
return $"sym:{lang}:{hash}";
}
private string ComputeFragment(string tuple)
{
var bytes = Encoding.UTF8.GetBytes(tuple);
var hash = _cryptoHash.ComputeHashForPurpose(bytes, HashPurpose.Symbol);
// Base64url without padding per spec
return Convert.ToBase64String(hash)
.Replace('+', '-')
.Replace('/', '_')
.TrimEnd('=');
}
private static string NormalizeAddress(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return "0x0";
}
var addrText = value.Trim();
var isHex = addrText.StartsWith("0x", StringComparison.OrdinalIgnoreCase);
if (isHex)
{
addrText = addrText[2..];
}
if (long.TryParse(addrText, isHex ? System.Globalization.NumberStyles.HexNumber : System.Globalization.NumberStyles.Integer, System.Globalization.CultureInfo.InvariantCulture, out var addrValue))
{
if (addrValue < 0)
{
addrValue = 0;
}
return $"0x{addrValue:x}";
}
addrText = addrText.TrimStart('0');
if (addrText.Length == 0)
{
addrText = "0";
}
return $"0x{addrText.ToLowerInvariant()}";
}
private static string Lower(string? value)
=> string.IsNullOrWhiteSpace(value) ? string.Empty : value.Trim().ToLowerInvariant();
private static string Norm(string? value)
=> string.IsNullOrWhiteSpace(value) ? string.Empty : value.Trim();
}

View File
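A short usage sketch with made-up symbol components; only the method shapes come from the class above:

    // Hypothetical inputs for illustration.
    var symbols = new SymbolIdBuilder(cryptoHash);

    var javaId = symbols.ForJava("com.example.web", "LoginController", "authenticate", "(Ljava/lang/String;)Z");
    var goId = symbols.ForGo("github.com/example/service", "internal/auth", "(*Store)", "Validate");

    // Results have the shape "sym:{lang}:{base64url-fragment}"; the fragment's algorithm follows
    // HashPurpose.Symbol, so the same tuple yields different IDs under GOST or SM3 profiles.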

@@ -1,63 +0,0 @@
# Notices for Eclipse Temurin
This content is produced and maintained by the Eclipse Temurin project.
* Project home: https://projects.eclipse.org/projects/adoptium.temurin
## Trademarks
Eclipse Temurin is a trademark of the Eclipse Foundation. Eclipse, and the
Eclipse Logo are registered trademarks of the Eclipse Foundation.
Java and all Java-based trademarks are trademarks of Oracle Corporation in
the United States, other countries, or both.
## Copyright
All content is the property of the respective authors or their employers.
For more information regarding authorship of content, please consult the
listed source code repository logs.
## Declared Project Licenses
This program and the accompanying materials are made available under the terms
of the GNU General Public License, version 2, with the Classpath Exception.
Additional information relating to the program and accompanying materials
license and usage is available as follows.
* For Eclipse Temurin version 8 see the LICENSE and ASSEMBLY_EXCEPTION files
in the top level directory of the installation.
* For Eclipse Temurin version 9 or later see the files under the legal/
directory in the top level directory of the installation.
SPDX-License-Identifier: GPL-2.0 WITH Classpath-exception-2.0
## Source Code
The project maintains the following source code repositories which may be
relevant to this content:
* https://github.com/adoptium/temurin-build
* https://github.com/adoptium/jdk
* https://github.com/adoptium/jdk8u
* https://github.com/adoptium/jdk11u
* https://github.com/adoptium/jdk17u
* https://github.com/adoptium/jdk20
* and so on
## Third-party Content
This program and accompanying materials contains third-party content.
* For Eclipse Temurin version 8 see the THIRD_PARTY_LICENSE file in the
top level directory of the installation.
* For Eclipse Temurin version 9 or later see the files under the legal/
directory in the top level directory of the installation.
## Cryptography
Content may contain encryption software. The country in which you are currently
may have restrictions on the import, possession, and use, and/or re-export to
another country, of encryption software. BEFORE using any encryption software,
please check the country's laws, regulations and policies concerning the import,
possession, or use, and re-export of encryption software, to see if this is
permitted.

View File

@@ -1,180 +0,0 @@
################################################################################
# JAXP Configuration File
#
# jaxp.properties (this file) is the default configuration file for JAXP, the API
# defined in the java.xml module. It is in java.util.Properties format and typically
# located in the {java.home}/conf directory. It may contain key/value pairs for
# specifying the implementation classes of JAXP factories and/or properties
# that have corresponding system properties.
#
# A user-specified configuration file can be set up using the system property
# java.xml.config.file to override any or all of the entries in jaxp.properties.
# The following statement provides myConfigurationFile as a custom configuration
# file:
# java -Djava.xml.config.file=myConfigurationFile
################################################################################
# ---- JAXP Default Configuration ----
#
# The JAXP default configuration (jaxp.properties) contains entries for the
# Factory Lookup Mechanism and properties with corresponding system properties.
# The values are generally set to the default values of the properties.
#
#
# JAXP Lookup Mechanism:
#
# The JAXP configuration file ranks 2nd to the System Property in the precedent
# order of the JAXP Lookup Mechanism. When the System Property is not specified,
# a JAXP factory reads the configuration file in order to locate an implementation
# class. If found, the class specified will be used as the factory implementation
# class.
#
# The format of an entry is key=value where the key is the fully qualified name
# of the factory and value that of the implementation class. The following entry
# set a DocumentBuilderFactory implementation class:
#
# javax.xml.parsers.DocumentBuilderFactory=com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl
#
#
# Java SE and JDK Implementation Specific Properties:
#
# The JAXP configuration file ranks above the default settings in the Property
# Precedence in that its entries will override the default values of the corresponding
# properties.
#
# All properties that have System Properties defined in Java SE or supported
# by the JDK Implementation can be placed in the configuration file to override
# the default property values. The format is:
# system-property-name=value
#
# For example, the FILES property in CatalogFeatures has an associated system
# property called javax.xml.catalog.files. An entry for the FILES property in the
# configuration file would therefore use javax.xml.catalog.files as the key, that
# is:
# javax.xml.catalog.files=strict
#
#
# Extension Functions:
#
# This property determines whether XSLT and XPath extension functions are allowed.
# The value type is boolean and the default value is true (allowing
# extension functions). The following entry would override the default value and
# disallow extension functions:
#
# jdk.xml.enableExtensionFunctions=false
#
#
# Overriding the default parser:
#
# This property allows using a third party implementation to override the default
# parser provided by the JDK. The value type is boolean and the default value is
# false, disallowing overriding the default parser. The setting below reflects
# the default property setting:
#
jdk.xml.overrideDefaultParser=false
#
#
# External Access Properties:
#
# The External Access Properties are defined in javax.xml.XMLConstants. Their
# system properties are javax.xml.accessExternalDTD, javax.xml.accessExternalSchema,
# and javax.xml.accessExternalStylesheet. The values are a list of protocols separated
# by comma, plus empty string ("") to represent no protocol allowed and the key
# word "all" for all access. The default is "all", allowing all external resources
# to be fetched. The followings are example of external access settings:
#
# allow local (file) DTDs to be retrieved
# javax.xml.accessExternalDTD=file
#
# allow local (file) and remote (http) external schemas
# javax.xml.accessExternalSchema=file, http
#
# reject any external stylesheets
# javax.xml.accessExternalStylesheet=""
#
# allow all external stylesheets
# javax.xml.accessExternalStylesheet="all"
#
#
# Catalog Properties:
#
# The Catalog API defines four features: FILES, PREFER, DEFER and RESOLVE.
# Except PREFER, all other properties can be placed in the configuration file
# using the system properties defined for them.
#
# FILES: A semicolon-delimited list of URIs to locate the catalog files. The URIs
# must be absolute and have a URL protocol handler for the URI scheme. The following
# is an example of setting up a catalog file:
#
# javax.xml.catalog.files = file:///users/auser/catalog/catalog.xml
#
# DEFER: Indicates that the alternative catalogs including those specified in
# delegate entries or nextCatalog are not read until they are needed. The value
# is a boolean and the default value is true.
#
# javax.xml.catalog.defer=true
#
# RESOLVE: Determines the action if there is no matching entry found after all of
# the specified catalogs are exhausted. The values are key words: strict, continue,
# and ignore. The default is strict. The following setting reflects the default
# setting.
#
# javax.xml.catalog.resolve=strict
#
#
# useCatalog:
# This property instructs XML processors to use XML Catalogs to resolve entity
# references. The value is a boolean and the default value is true.
#
# javax.xml.useCatalog=true
#
#
# Implementation Specific Properties - Limits
#
# Limits have a value type Integer. The values must be positive integers. Zero
# means no limit.
#
# Limits the number of entity expansions. The default value is 64000
# jdk.xml.entityExpansionLimit=64000
#
# Limits the total size of all entities that include general and parameter entities.
# The size is calculated as an aggregation of all entities. The default value is 5x10^7.
# jdk.xml.totalEntitySizeLimit=5E7
#
# Limits the maximum size of any general entities. The default value is 0.
# jdk.xml.maxGeneralEntitySizeLimit=0
#
# Limits the maximum size of any parameter entities, including the result of
# nesting multiple parameter entities. The default value is 10^6.
# jdk.xml.maxParameterEntitySizeLimit=1E6
#
# Limits the total number of nodes in all entity references. The default value is 3x10^6.
# jdk.xml.entityReplacementLimit=3E6
#
# Limits the number of attributes an element can have. The default value is 10000.
# jdk.xml.elementAttributeLimit=10000
#
# Limits the number of content model nodes that may be created when building a
# grammar for a W3C XML Schema that contains maxOccurs attributes with values
# other than "unbounded". The default value is 5000.
# jdk.xml.maxOccurLimit=5000
#
# Limits the maximum element depth. The default value is 0.
# jdk.xml.maxElementDepth=0
#
# Limits the maximum size of XML names, including element name, attribute name
# and namespace prefix and URI. The default value is 1000.
jdk.xml.maxXMLNameLimit=1000
#
#
# XPath Limits
#
# Limits the number of groups an XPath expression can contain. The default value is 10.
jdk.xml.xpathExprGrpLimit=10
#
# Limits the number of operators an XPath expression can contain. The default value is 100.
jdk.xml.xpathExprOpLimit=100
#
# Limits the total number of XPath operators in an XSL Stylesheet. The default value is 10000.
jdk.xml.xpathTotalOpLimit=10000

View File

@@ -1,63 +0,0 @@
############################################################
# Default Logging Configuration File
#
# You can use a different file by specifying a filename
# with the java.util.logging.config.file system property.
# For example, java -Djava.util.logging.config.file=myfile
############################################################
############################################################
# Global properties
############################################################
# "handlers" specifies a comma-separated list of log Handler
# classes. These handlers will be installed during VM startup.
# Note that these classes must be on the system classpath.
# By default we only configure a ConsoleHandler, which will only
# show messages at the INFO and above levels.
handlers= java.util.logging.ConsoleHandler
# To also add the FileHandler, use the following line instead.
#handlers= java.util.logging.FileHandler, java.util.logging.ConsoleHandler
# Default global logging level.
# This specifies which kinds of events are logged across
# all loggers. For any given facility this global level
# can be overridden by a facility-specific level
# Note that the ConsoleHandler also has a separate level
# setting to limit messages printed to the console.
.level= INFO
############################################################
# Handler specific properties.
# Describes specific configuration info for Handlers.
############################################################
# default file output is in user's home directory.
java.util.logging.FileHandler.pattern = %h/java%u.log
java.util.logging.FileHandler.limit = 50000
java.util.logging.FileHandler.count = 1
# Default number of locks FileHandler can obtain synchronously.
# This specifies maximum number of attempts to obtain lock file by FileHandler
# implemented by incrementing the unique field %u as per FileHandler API documentation.
java.util.logging.FileHandler.maxLocks = 100
java.util.logging.FileHandler.formatter = java.util.logging.XMLFormatter
# Limit the messages that are printed on the console to INFO and above.
java.util.logging.ConsoleHandler.level = INFO
java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
# Example to customize the SimpleFormatter output format
# to print one-line log message like this:
# <level>: <log message> [<date/time>]
#
# java.util.logging.SimpleFormatter.format=%4$s: %5$s [%1$tc]%n
############################################################
# Facility-specific properties.
# Provides extra control for each logger.
############################################################
# For example, set the com.xyz.foo logger to only log SEVERE
# messages:
# com.xyz.foo.level = SEVERE

View File

@@ -1,79 +0,0 @@
######################################################################
# Default Access Control File for Remote JMX(TM) Monitoring
######################################################################
#
# Access control file for Remote JMX API access to monitoring.
# This file defines the allowed access for different roles. The
# password file (jmxremote.password by default) defines the roles and their
# passwords. To be functional, a role must have an entry in
# both the password and the access files.
#
# The default location of this file is $JRE/conf/management/jmxremote.access
# You can specify an alternate location by specifying a property in
# the management config file $JRE/conf/management/management.properties
# (See that file for details)
#
# The file format for password and access files is syntactically the same
# as the Properties file format. The syntax is described in the Javadoc
# for java.util.Properties.load.
# A typical access file has multiple lines, where each line is blank,
# a comment (like this one), or an access control entry.
#
# An access control entry consists of a role name, and an
# associated access level. The role name is any string that does not
# itself contain spaces or tabs. It corresponds to an entry in the
# password file (jmxremote.password). The access level is one of the
# following:
# "readonly" grants access to read attributes of MBeans.
# For monitoring, this means that a remote client in this
# role can read measurements but cannot perform any action
# that changes the environment of the running program.
# "readwrite" grants access to read and write attributes of MBeans,
# to invoke operations on them, and optionally
# to create or remove them. This access should be granted
# only to trusted clients, since they can potentially
# interfere with the smooth operation of a running program.
#
# The "readwrite" access level can optionally be followed by the "create" and/or
# "unregister" keywords. The "unregister" keyword grants access to unregister
# (delete) MBeans. The "create" keyword grants access to create MBeans of a
# particular class or of any class matching a particular pattern. Access
# should only be granted to create MBeans of known and trusted classes.
#
# For example, the following entry would grant readwrite access
# to "controlRole", as well as access to create MBeans of the class
# javax.management.monitor.CounterMonitor and to unregister any MBean:
# controlRole readwrite \
# create javax.management.monitor.CounterMonitorMBean \
# unregister
# or equivalently:
# controlRole readwrite unregister create javax.management.monitor.CounterMBean
#
# The following entry would grant readwrite access as well as access to create
# MBeans of any class in the packages javax.management.monitor and
# javax.management.timer:
# controlRole readwrite \
# create javax.management.monitor.*,javax.management.timer.* \
# unregister
#
# The \ character is defined in the Properties file syntax to allow continuation
# lines as shown here. A * in a class pattern matches a sequence of characters
# other than dot (.), so javax.management.monitor.* matches
# javax.management.monitor.CounterMonitor but not
# javax.management.monitor.foo.Bar.
#
# A given role should have at most one entry in this file. If a role
# has no entry, it has no access.
# If multiple entries are found for the same role name, then the last
# access entry is used.
#
#
# Default access control entries:
# o The "monitorRole" role has readonly access.
# o The "controlRole" role has readwrite access and can create the standard
# Timer and Monitor MBeans defined by the JMX API.
monitorRole readonly
controlRole readwrite \
create javax.management.monitor.*,javax.management.timer.* \
unregister

View File

@@ -1,115 +0,0 @@
# ----------------------------------------------------------------------
# Template for jmxremote.password
#
# o Copy this template to jmxremote.password
# o Set the user/password entries in jmxremote.password
# o Change the permission of jmxremote.password to be accessible
# only by the owner.
# o The jmxremote.passwords file will be re-written by the server
# to replace all plain text passwords with hashed passwords when
# the file is read by the server.
#
##############################################################
# Password File for Remote JMX Monitoring
##############################################################
#
# Password file for Remote JMX API access to monitoring. This
# file defines the different roles and their passwords. The access
# control file (jmxremote.access by default) defines the allowed
# access for each role. To be functional, a role must have an entry
# in both the password and the access files.
#
# Default location of this file is $JRE/conf/management/jmxremote.password
# You can specify an alternate location by specifying a property in
# the management config file $JRE/conf/management/management.properties
# or by specifying a system property (See that file for details).
##############################################################
# File format of the jmxremote.password file
##############################################################
#
# The file contains multiple lines where each line is blank,
# a comment (like this one), or a password entry.
#
# password entry follows the below syntax
# role_name W [clearPassword|hashedPassword]
#
# role_name is any string that does not itself contain spaces or tabs.
# W = spaces or tabs
#
# Passwords can be specified via clear text or via a hash. Clear text password
# is any string that does not contain spaces or tabs. Hashed passwords must
# follow the below format.
# hashedPassword = base64_encoded_64_byte_salt W base64_encoded_hash W hash_algorithm
# where,
# base64_encoded_64_byte_salt = 64 byte random salt
# base64_encoded_hash = Hash_algorithm(password + salt)
# W = spaces or tabs
# hash_algorithm = Algorithm string specified using the format below
# https://docs.oracle.com/javase/9/docs/specs/security/standard-names.html#messagedigest-algorithms
# This is an optional field. If not specified, SHA3-512 will be assumed.
#
# If passwords are in clear, they will be overwritten by their hash if all of
# the below criteria are met.
# * com.sun.management.jmxremote.password.toHashes property is set to true in
# management.properties file
# * the password file is writable
# * the system security policy allows writing into the password file, if a
# security manager is configured
#
# In order to change the password for a role, replace the hashed password entry
# with a new clear text password or a new hashed password. If the new password
# is in clear, it will be replaced with its hash when a new login attempt is made.
#
# A given role should have at most one entry in this file. If a role
# has no entry, it has no access.
# If multiple entries are found for the same role name, then the last one
# is used.
#
# A user generated hashed password file can also be used instead of clear-text
# password file. If generated by the user, hashed passwords must follow the
# format specified above.
#
# Caution: It is recommended not to edit the password file while the
# agent is running, as edits could be lost if a client connection triggers the
# hashing of the password file at the same time that the file is externally modified.
# The integrity of the file is guaranteed, but any external edits made to the
# file during the short period between the time that the agent reads the file
# and the time that it writes it back might get lost
##############################################################
# File permissions of the jmxremote.password file
##############################################################
# This file must be made accessible by ONLY the owner,
# otherwise the program will exit with an error.
#
# In a typical installation, this file can be accessed by anybody on the
# local machine, and possibly by people on other machines.
# For security, you should either restrict the access to this file except for owner,
# or specify another, less accessible file in the management config file
# as described above.
#
# In order to prevent inadverent edits to the password file in the
# production environment, it is recommended to deploy a read-only
# hashed password file. The hashed entries for clear passwords can be generated
# in advance by running the JMX agent.
#
##############################################################
# Sample of the jmxremote.password file
##############################################################
# Following are two commented-out entries. The "monitorRole" role has
# password "QED". The "controlRole" role has password "R&D". This is an example
# of specifying passwords in the clear
#
# monitorRole QED
# controlRole R&D
#
# Once a login attempt is made, passwords will be hashed and the file will have
# below entries with clear passwords overwritten by their respective
# SHA3-512 hash
#
# monitorRole trilby APzBTt34rV2l+OMbuvbnOQ4si8UZmfRCVbIY1+fAofV5CkQzXS/FDMGteQQk/R3q1wtt104qImzJEA7gCwl6dw== 4EeTdSJ7X6Imu0Mb+dWqIns7a7QPIBoM3NB/XlpMQSPSicE7PnlALVWn2pBY3Q3pGDHyAb32Hd8GUToQbUhAjA== SHA3-512
# controlRole roHEJSbRqSSTII4Z4+NOCV2OJaZVQ/dw153Fy2u4ILDP9XiZ426GwzCzc3RtpoqNMwqYIcfdd74xWXSMrWtGaA== w9qDsekgKn0WOVJycDyU0kLBa081zbStcCjUAVEqlfon5Sgx7XHtaodbmzpLegA1jT7Ag36T0zHaEWRHJe2fdA== SHA3-512
#

View File

@@ -1,327 +0,0 @@
#####################################################################
# Default Configuration File for Java Platform Management
#####################################################################
#
# The Management Configuration file (in java.util.Properties format)
# will be read if one of the following system properties is set:
# -Dcom.sun.management.jmxremote.port=<port-number>
# or -Dcom.sun.management.config.file=<this-file>
#
# The default Management Configuration file is:
#
# $JRE/conf/management/management.properties
#
# Another location for the Management Configuration File can be specified
# by the following property on the Java command line:
#
# -Dcom.sun.management.config.file=<this-file>
#
# If -Dcom.sun.management.config.file=<this-file> is set, the port
# number for the management agent can be specified in the config file
# using the following lines:
#
# ################ Management Agent Port #########################
#
# For setting the JMX RMI agent port use the following line
# com.sun.management.jmxremote.port=<port-number>
#
# For setting the JMX local server port use the following line
# com.sun.management.jmxremote.local.port=<port-number>
#####################################################################
# Optional Instrumentation
#####################################################################
#
# By default only the basic instrumentation with low overhead is on.
# The following properties allow to selectively turn on optional
# instrumentation which are off by default and may have some
# additional overhead.
#
# com.sun.management.enableThreadContentionMonitoring
#
# This option enables thread contention monitoring if the
# Java virtual machine supports such instrumentation.
# Refer to the specification for the java.lang.management.ThreadMXBean
# interface - see isThreadContentionMonitoringSupported() method.
#
# To enable thread contention monitoring, uncomment the following line
# com.sun.management.enableThreadContentionMonitoring
#####################################################################
# RMI Management Properties
#####################################################################
#
# If system property -Dcom.sun.management.jmxremote.port=<port-number>
# is set then
# - A MBean server is started
# - JRE Platform MBeans are registered in the MBean server
# - RMI connector is published in a private readonly registry at
# specified port using a well known name, "jmxrmi"
# - the following properties are read for JMX remote management.
#
# The configuration can be specified only at startup time.
# Later changes to above system property (e.g. via setProperty method),
# this config file, the password file, or the access file have no effect to the
# running MBean server, the connector, or the registry.
#
#
# ########## RMI connector settings for local management ##########
#
# com.sun.management.jmxremote.local.only=true|false
# Default for this property is true. (Case for true/false ignored)
# If this property is specified as true then the local JMX RMI connector
# server will only accept connection requests from clients running on
# the host where the out-of-the-box JMX management agent is running.
# In order to ensure backwards compatibility this property could be
# set to false. However, deploying the local management agent in this
# way is discouraged because the local JMX RMI connector server will
# accept connection requests from any client either local or remote.
# For remote management the remote JMX RMI connector server should
# be used instead with authentication and SSL/TLS encryption enabled.
#
# For allowing the local management agent accept local
# and remote connection requests use the following line
# com.sun.management.jmxremote.local.only=false
#
# ###################### RMI SSL #############################
#
# com.sun.management.jmxremote.ssl=true|false
# Default for this property is true. (Case for true/false ignored)
# If this property is specified as false then SSL is not used.
#
# For RMI monitoring without SSL use the following line
# com.sun.management.jmxremote.ssl=false
# com.sun.management.jmxremote.ssl.config.file=filepath
# Specifies the location of the SSL configuration file. A properties
# file can be used to supply the keystore and truststore location and
# password settings thus avoiding to pass them as cleartext in the
# command-line.
#
# The current implementation of the out-of-the-box management agent will
# look up and use the properties specified below to configure the SSL
# keystore and truststore, if present:
# javax.net.ssl.keyStore=<keystore-location>
# javax.net.ssl.keyStorePassword=<keystore-password>
# javax.net.ssl.trustStore=<truststore-location>
# javax.net.ssl.trustStorePassword=<truststore-password>
# Any other properties in the file will be ignored. This will allow us
# to extend the property set in the future if required by the default
# SSL implementation.
#
# If the property "com.sun.management.jmxremote.ssl" is set to false,
# then this property is ignored.
#
# For supplying the keystore settings in a file use the following line
# com.sun.management.jmxremote.ssl.config.file=filepath
# com.sun.management.jmxremote.ssl.enabled.cipher.suites=<cipher-suites>
# The value of this property is a string that is a comma-separated list
# of SSL/TLS cipher suites to enable. This property can be specified in
# conjunction with the previous property "com.sun.management.jmxremote.ssl"
# in order to control which particular SSL/TLS cipher suites are enabled
# for use by accepted connections. If this property is not specified then
# the SSL/TLS RMI Server Socket Factory uses the SSL/TLS cipher suites that
# are enabled by default.
#
# com.sun.management.jmxremote.ssl.enabled.protocols=<protocol-versions>
# The value of this property is a string that is a comma-separated list
# of SSL/TLS protocol versions to enable. This property can be specified in
# conjunction with the previous property "com.sun.management.jmxremote.ssl"
# in order to control which particular SSL/TLS protocol versions are
# enabled for use by accepted connections. If this property is not
# specified then the SSL/TLS RMI Server Socket Factory uses the SSL/TLS
# protocol versions that are enabled by default.
#
# com.sun.management.jmxremote.ssl.need.client.auth=true|false
# Default for this property is false. (Case for true/false ignored)
# If this property is specified as true in conjunction with the previous
# property "com.sun.management.jmxremote.ssl" then the SSL/TLS RMI Server
# Socket Factory will require client authentication.
#
# For RMI monitoring with SSL client authentication use the following line
# com.sun.management.jmxremote.ssl.need.client.auth=true
# com.sun.management.jmxremote.registry.ssl=true|false
# Default for this property is false. (Case for true/false ignored)
# If this property is specified as true then the RMI registry used
# to bind the RMIServer remote object is protected with SSL/TLS
# RMI Socket Factories that can be configured with the properties:
# com.sun.management.jmxremote.ssl.config.file
# com.sun.management.jmxremote.ssl.enabled.cipher.suites
# com.sun.management.jmxremote.ssl.enabled.protocols
# com.sun.management.jmxremote.ssl.need.client.auth
# If the two properties below are true at the same time, i.e.
# com.sun.management.jmxremote.ssl=true
# com.sun.management.jmxremote.registry.ssl=true
# then the RMIServer remote object and the RMI registry are
# both exported with the same SSL/TLS RMI Socket Factories.
#
# For using an SSL/TLS protected RMI registry use the following line
# com.sun.management.jmxremote.registry.ssl=true
#
# ################ RMI User authentication ################
#
# com.sun.management.jmxremote.authenticate=true|false
# Default for this property is true. (Case for true/false ignored)
# If this property is specified as false then no authentication is
# performed and all users are allowed all access.
#
# For RMI monitoring without any checking use the following line
# com.sun.management.jmxremote.authenticate=false
#
# ################ RMI Login configuration ###################
#
# com.sun.management.jmxremote.login.config=<config-name>
# Specifies the name of a JAAS login configuration entry to use when
# authenticating users of RMI monitoring.
#
# Setting this property is optional - the default login configuration
# specifies a file-based authentication that uses the password file.
#
# When using this property to override the default login configuration
# then the named configuration entry must be in a file that gets loaded
# by JAAS. In addition, the login module(s) specified in the configuration
# should use the name and/or password callbacks to acquire the user's
# credentials. See the NameCallback and PasswordCallback classes in the
# javax.security.auth.callback package for more details.
#
# If the property "com.sun.management.jmxremote.authenticate" is set to
# false, then this property and the password & access files are ignored.
#
# For a non-default login configuration use the following line
# com.sun.management.jmxremote.login.config=<config-name>
#
# ################ RMI Password file location ##################
#
# com.sun.management.jmxremote.password.file=filepath
# Specifies location for password file
# This is optional - default location is
# $JRE/conf/management/jmxremote.password
#
# If the property "com.sun.management.jmxremote.authenticate" is set to
# false, then this property and the password & access files are ignored.
# Otherwise the password file must exist and be in the valid format.
# If the password file is empty or non-existent then no access is allowed.
#
# For a non-default password file location use the following line
# com.sun.management.jmxremote.password.file=filepath
#
# ################# Hash passwords in password file ##############
# com.sun.management.jmxremote.password.toHashes = true|false
# Default for this property is true.
# Specifies if passwords in the password file should be hashed or not.
# If this property is true, and if the password file is writable, and if the
# system security policy allows writing into the password file,
# all the clear passwords in the password file will be replaced by
# their SHA3-512 hash when the file is read by the server
#
#
# ################ RMI Access file location #####################
#
# com.sun.management.jmxremote.access.file=filepath
# Specifies location for access file
# This is optional - default location is
# $JRE/conf/management/jmxremote.access
#
# If the property "com.sun.management.jmxremote.authenticate" is set to
# false, then this property and the password & access files are ignored.
# Otherwise, the access file must exist and be in the valid format.
# If the access file is empty or non-existent then no access is allowed.
#
# For a non-default password file location use the following line
# com.sun.management.jmxremote.access.file=filepath
#
# ################ Management agent listen interface #########################
#
# com.sun.management.jmxremote.host=<host-or-interface-name>
# Specifies the local interface on which the JMX RMI agent will bind.
# This is useful when running on machines which have several
# interfaces defined. It makes it possible to listen to a specific
# subnet accessible through that interface.
#
# The format of the value for that property is any string accepted
# by java.net.InetAddress.getByName(String).
#
# ################ Filter for ObjectInputStream #############################
# com.sun.management.jmxremote.serial.filter.pattern=<filter-string>
# A filter, if configured, is used by java.io.ObjectInputStream during
# deserialization of parameters sent to the JMX default agent to validate the
# contents of the stream.
# A filter is configured as a sequence of patterns, each pattern is either
# matched against the name of a class in the stream or defines a limit.
# Patterns are separated by ";" (semicolon).
# Whitespace is significant and is considered part of the pattern.
#
# If a pattern includes a "=", it sets a limit.
# If a limit appears more than once the last value is used.
# Limits are checked before classes regardless of the order in the sequence of patterns.
# If any of the limits are exceeded, the filter status is REJECTED.
#
# maxdepth=value - the maximum depth of a graph
# maxrefs=value - the maximum number of internal references
# maxbytes=value - the maximum number of bytes in the input stream
# maxarray=value - the maximum array length allowed
#
# Other patterns, from left to right, match the class or package name as
# returned from Class.getName.
# If the class is an array type, the class or package to be matched is the element type.
# Arrays of any number of dimensions are treated the same as the element type.
# For example, a pattern of "!example.Foo", rejects creation of any instance or
# array of example.Foo.
#
# If the pattern starts with "!", the status is REJECTED if the remaining pattern
# is matched; otherwise the status is ALLOWED if the pattern matches.
# If the pattern contains "/", the non-empty prefix up to the "/" is the module name;
# if the module name matches the module name of the class then
# the remaining pattern is matched with the class name.
# If there is no "/", the module name is not compared.
# If the pattern ends with ".**" it matches any class in the package and all subpackages.
# If the pattern ends with ".*" it matches any class in the package.
# If the pattern ends with "*", it matches any class with the pattern as a prefix.
# If the pattern is equal to the class name, it matches.
# Otherwise, the status is UNDECIDED.
#
# Ending with !* ensures we reject classes which are otherwise unmatched.
com.sun.management.jmxremote.serial.filter.pattern=\
java.lang.*;\
java.lang.reflect.Proxy;\
java.math.BigInteger;\
java.math.BigDecimal;\
java.util.*;\
javax.management.*;\
javax.management.modelmbean.*;\
javax.management.monitor.*;\
javax.management.openmbean.*;\
javax.management.relation.*;\
javax.management.remote.*;\
javax.management.remote.rmi.*;\
javax.management.timer.*;\
javax.rmi.ssl.*;\
java.rmi.MarshalledObject;\
java.rmi.dgc.*;\
java.rmi.server.*;\
javax.security.auth.Subject;\
!*

View File

@@ -1,147 +0,0 @@
############################################################
# Default Networking Configuration File
#
# This file may contain default values for the networking system properties.
# These values are only used when the system properties are not specified
# on the command line or set programmatically.
# For now, only the various proxy settings can be configured here.
############################################################
# Whether or not the DefaultProxySelector will default to System Proxy
# settings when they do exist.
# Set it to 'true' to enable this feature and check for platform
# specific proxy settings
# Note that the system properties that do explicitly set proxies
# (like http.proxyHost) do take precedence over the system settings
# even if java.net.useSystemProxies is set to true.
java.net.useSystemProxies=false
#------------------------------------------------------------------------
# Proxy configuration for the various protocol handlers.
# DO NOT uncomment these lines if you have set java.net.useSystemProxies
# to true as the protocol specific properties will take precedence over
# system settings.
#------------------------------------------------------------------------
# HTTP Proxy settings. proxyHost is the name of the proxy server
# (e.g. proxy.mydomain.com), proxyPort is the port number to use (default
# value is 80) and nonProxyHosts is a '|' separated list of hostnames which
# should be accessed directly, ignoring the proxy server (default value is
# localhost & 127.0.0.1).
#
# http.proxyHost=
# http.proxyPort=80
http.nonProxyHosts=localhost|127.*|[::1]
#
# HTTPS Proxy Settings. proxyHost is the name of the proxy server
# (e.g. proxy.mydomain.com), proxyPort is the port number to use (default
# value is 443). The HTTPS protocol handlers uses the http nonProxyHosts list.
#
# https.proxyHost=
# https.proxyPort=443
#
# FTP Proxy settings. proxyHost is the name of the proxy server
# (e.g. proxy.mydomain.com), proxyPort is the port number to use (default
# value is 80) and nonProxyHosts is a '|' separated list of hostnames which
# should be accessed directly, ignoring the proxy server (default value is
# localhost & 127.0.0.1).
#
# ftp.proxyHost=
# ftp.proxyPort=80
ftp.nonProxyHosts=localhost|127.*|[::1]
#
# Socks proxy settings. socksProxyHost is the name of the proxy server
# (e.g. socks.domain.com), socksProxyPort is the port number to use
# (default value is 1080)
#
# socksProxyHost=
# socksProxyPort=1080
#
# HTTP Keep Alive settings. remainingData is the maximum amount of data
# in kilobytes that will be cleaned off the underlying socket so that it
# can be reused (default value is 512K), queuedConnections is the maximum
# number of Keep Alive connections to be on the queue for clean up (default
# value is 10).
# http.KeepAlive.remainingData=512
# http.KeepAlive.queuedConnections=10
# Authentication Scheme restrictions for HTTP and HTTPS.
#
# In some environments certain authentication schemes may be undesirable
# when proxying HTTP or HTTPS. For example, "Basic" results in effectively the
# cleartext transmission of the user's password over the physical network.
# This section describes the mechanism for disabling authentication schemes
# based on the scheme name. Disabled schemes will be treated as if they are not
# supported by the implementation.
#
# The 'jdk.http.auth.tunneling.disabledSchemes' property lists the authentication
# schemes that will be disabled when tunneling HTTPS over a proxy, HTTP CONNECT.
# The 'jdk.http.auth.proxying.disabledSchemes' property lists the authentication
# schemes that will be disabled when proxying HTTP.
#
# In both cases the property is a comma-separated list of, case-insensitive,
# authentication scheme names, as defined by their relevant RFCs. An
# implementation may, but is not required to, support common schemes whose names
# include: 'Basic', 'Digest', 'NTLM', 'Kerberos', 'Negotiate'. A scheme that
# is not known, or not supported, by the implementation is ignored.
#
# Note: This property is currently used by the JDK Reference implementation. It
# is not guaranteed to be examined and used by other implementations.
#
#jdk.http.auth.proxying.disabledSchemes=
jdk.http.auth.tunneling.disabledSchemes=Basic
#
# Allow restricted HTTP request headers
#
# By default, the following request headers are not allowed to be set by user code
# in HttpRequests: "connection", "content-length", "expect", "host" and "upgrade".
# The 'jdk.httpclient.allowRestrictedHeaders' property allows one or more of these
# headers to be specified as a comma separated list to override the default restriction.
# The names are case-insensitive and white-space is ignored (removed before processing
# the list). Note, this capability is mostly intended for testing and isn't expected
# to be used in real deployments. Protocol errors or other undefined behavior is likely
# to occur when using them. The property is not set by default.
# Note also, that there may be other headers that are restricted from being set
# depending on the context. This includes the "Authorization" header when the
# relevant HttpClient has an authenticator set. These restrictions cannot be
# overridden by this property.
#
# jdk.httpclient.allowRestrictedHeaders=host
#
#
# Transparent NTLM HTTP authentication mode on Windows. Transparent authentication
# can be used for the NTLM scheme, where the security credentials based on the
# currently logged in user's name and password can be obtained directly from the
# operating system, without prompting the user. This property has three possible
# values which regulate the behavior as shown below. Other unrecognized values
# are handled the same as 'disabled'. Note, that NTLM is not considered to be a
# strongly secure authentication scheme and care should be taken before enabling
# this mechanism.
#
# Transparent authentication never used.
#jdk.http.ntlm.transparentAuth=disabled
#
# Enabled for all hosts.
#jdk.http.ntlm.transparentAuth=allHosts
#
# Enabled for hosts that are trusted in Windows Internet settings
#jdk.http.ntlm.transparentAuth=trustedHosts
#
jdk.http.ntlm.transparentAuth=disabled
#
# Default directory where automatically bound Unix domain server
# sockets are stored. Sockets are automatically bound when bound
# with a null address.
#
# On Unix the search order to determine this directory is:
#
# 1. System property jdk.net.unixdomain.tmpdir
#
# 2. Networking property jdk.net.unixdomain.tmpdir specified
# in this file (effective default)
#
# 3. System property java.io.tmpdir
#
jdk.net.unixdomain.tmpdir=/tmp
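As a companion to the proxy section above, a small sketch (not part of this file; host names are placeholders) of the programmatic equivalent, where the same keys are set as system properties and consulted by the default ProxySelector:

    import java.net.ProxySelector;
    import java.net.URI;

    public class ProxyDefaultsSketch {
        public static void main(String[] args) {
            // Equivalent to uncommenting http.proxyHost/http.proxyPort above; explicit
            // proxy properties take precedence over java.net.useSystemProxies=true.
            System.setProperty("http.proxyHost", "proxy.example.com");
            System.setProperty("http.proxyPort", "8080");
            System.setProperty("http.nonProxyHosts", "localhost|127.*|[::1]");

            ProxySelector selector = ProxySelector.getDefault();
            System.out.println(selector.select(URI.create("http://internal.example.org/"))); // proxied
            System.out.println(selector.select(URI.create("http://localhost/")));            // DIRECT
        }
    }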

@@ -1,30 +0,0 @@
#
# Configuration file to enable InfiniBand Sockets Direct Protocol.
#
# Each line that does not start with a comment (#) is a rule to indicate when
# the SDP transport protocol should be used. The format of a rule is as follows:
# ("bind"|"connect") 1*LWSP-char (hostname|ipaddress["/"prefix]) 1*LWSP-char ("*"|port)["-"("*"|port)]
#
# A "bind" rule indicates that the SDP protocol transport should be used when
# a TCP socket binds to an address/port that matches the rule. A "connect" rule
# indicates that the SDP protocol transport should be used when an unbound
# TCP socket attempts to connect to an address/port that matches the rule.
# Addresses may be specified as hostnames or literal Internet Protocol (IP)
# addresses. When a literal IP address is used then a prefix length may be used
# to indicate the number of bits for matching (useful when a block of addresses
# or subnet is allocated to the InfiniBand fabric).
# Use SDP for all sockets that bind to specific local addresses
#bind 192.168.1.1 *
#bind fe80::21b:24ff:fe3d:7896 *
# Use SDP for all sockets that bind to the wildcard address in a port range
#bind 0.0.0.0 5000-5999
#bind ::0 5000-5999
# Use SDP when connecting to all application services on 192.168.1.*
#connect 192.168.1.0/24 1024-*
# Use SDP when connecting to the http server or MySQL database on hpccluster.
#connect hpccluster.foo.com 80
#connect hpccluster.foo.com 3306
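These rules only take effect when the JVM is started with -Dcom.sun.sdp.conf=<path-to-this-file> (optionally with -Dcom.sun.sdp.debug) on a platform where the JDK supports SDP. A hedged sketch follows (not part of this file; the address and port mirror the commented sample rules and will only bind on a host that actually owns them):

    import java.net.InetSocketAddress;
    import java.nio.channels.ServerSocketChannel;

    public class SdpBindSketch {
        public static void main(String[] args) throws Exception {
            // When launched with a matching "bind" rule (e.g. "bind 192.168.1.1 *"), this
            // ordinary channel bind transparently uses SDP; the code itself is unchanged.
            try (ServerSocketChannel server = ServerSocketChannel.open()) {
                server.bind(new InetSocketAddress("192.168.1.1", 5000));
                System.out.println("Listening on " + server.getLocalAddress());
            }
        }
    }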

@@ -1,46 +0,0 @@
//
// This system policy file grants a set of default permissions to all domains
// and can be configured to grant additional permissions to modules and other
// code sources. The code source URL scheme for modules linked into a
// run-time image is "jrt".
//
// For example, to grant permission to read the "foo" property to the module
// "com.greetings", the grant entry is:
//
// grant codeBase "jrt:/com.greetings" {
// permission java.util.PropertyPermission "foo", "read";
// };
//
// default permissions granted to all domains
grant {
// allows anyone to listen on dynamic ports
permission java.net.SocketPermission "localhost:0", "listen";
// "standard" properties that can be read by anyone
permission java.util.PropertyPermission "java.version", "read";
permission java.util.PropertyPermission "java.vendor", "read";
permission java.util.PropertyPermission "java.vendor.url", "read";
permission java.util.PropertyPermission "java.class.version", "read";
permission java.util.PropertyPermission "os.name", "read";
permission java.util.PropertyPermission "os.version", "read";
permission java.util.PropertyPermission "os.arch", "read";
permission java.util.PropertyPermission "file.separator", "read";
permission java.util.PropertyPermission "path.separator", "read";
permission java.util.PropertyPermission "line.separator", "read";
permission java.util.PropertyPermission
"java.specification.version", "read";
permission java.util.PropertyPermission
"java.specification.maintenance.version", "read";
permission java.util.PropertyPermission "java.specification.vendor", "read";
permission java.util.PropertyPermission "java.specification.name", "read";
permission java.util.PropertyPermission
"java.vm.specification.version", "read";
permission java.util.PropertyPermission
"java.vm.specification.vendor", "read";
permission java.util.PropertyPermission
"java.vm.specification.name", "read";
permission java.util.PropertyPermission "java.vm.version", "read";
permission java.util.PropertyPermission "java.vm.vendor", "read";
permission java.util.PropertyPermission "java.vm.name", "read";
};
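A minimal sketch of what the default grant covers (not part of the policy file; only meaningful when a security manager is installed): the listed properties are readable from any protection domain, while anything else still needs an explicit grant in an application policy.

    public class DefaultGrantSketch {
        public static void main(String[] args) {
            // Covered by the default grant above, so this succeeds for any code source
            // when a security manager is active.
            System.out.println(System.getProperty("java.vm.name"));
            System.out.println(System.getProperty("os.arch"));
            // A property outside the grant, e.g. user.home, would instead require an
            // explicit java.util.PropertyPermission in an application policy file.
        }
    }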

@@ -1,54 +0,0 @@
Java(TM) Cryptography Extension Policy Files
for the Java(TM) Platform, Standard Edition Runtime Environment

README
------------------------------------------------------------------------

Import and export control rules on cryptographic software vary from
country to country. The Java Cryptography Extension (JCE) architecture
allows flexible cryptographic key strength to be configured via the
jurisdiction policy files which are referenced by the "crypto.policy"
security property in the <java-home>/conf/security/java.security file.

By default, Java provides two different sets of cryptographic policy
files:

unlimited: These policy files contain no restrictions on cryptographic
strengths or algorithms

limited: These policy files contain more restricted cryptographic
strengths

These files reside in <java-home>/conf/security/policy in the "unlimited"
or "limited" subdirectories respectively.

Each subdirectory contains a complete policy configuration,
and subdirectories can be added/edited/removed to reflect your
import or export control product requirements.

Within a subdirectory, the effective policy is the combined minimum
permissions of the grant statements in the file(s) matching the filename
pattern "default_*.policy". At least one grant is required. For example:

limited = Export (all) + Import (limited) = Limited
unlimited = Export (all) + Import (all) = Unlimited

The effective exemption policy is the combined minimum permissions
of the grant statements in the file(s) matching the filename pattern
"exempt_*.policy". Exemption grants are optional. For example:

limited = grants exemption permissions, by which the
effective policy can be circumvented.
e.g. KeyRecovery/KeyEscrow/KeyWeakening.

Please see the Java Cryptography Architecture (JCA) documentation for
additional information on these files and formats.

YOU ARE ADVISED TO CONSULT YOUR EXPORT/IMPORT CONTROL COUNSEL OR ATTORNEY
TO DETERMINE THE EXACT REQUIREMENTS.

Please note that the JCE for Java SE, including the JCE framework,
cryptographic policy files, and standard JCE providers provided with
the Java SE, have been reviewed and approved for export as mass market
encryption item by the US Bureau of Industry and Security.
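For illustration, a minimal sketch (not part of the README) of selecting and inspecting the effective policy from code; on recent JDKs the default is already "unlimited":

    import java.security.Security;
    import javax.crypto.Cipher;

    public class CryptoPolicySketch {
        public static void main(String[] args) throws Exception {
            // Mirrors the "crypto.policy" security property described above; it can also
            // be set statically in <java-home>/conf/security/java.security.
            Security.setProperty("crypto.policy", "unlimited");

            // Reports 128 under the "limited" policy and Integer.MAX_VALUE under "unlimited".
            System.out.println("AES max key length: " + Cipher.getMaxAllowedKeyLength("AES"));
        }
    }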

@@ -1,6 +0,0 @@
// Default US Export policy file.
grant {
// There is no restriction to any algorithms.
permission javax.crypto.CryptoAllPermission;
};

@@ -1,14 +0,0 @@
// Some countries have import limits on crypto strength. This policy file
// is worldwide importable.
grant {
permission javax.crypto.CryptoPermission "DES", 64;
permission javax.crypto.CryptoPermission "DESede", *;
permission javax.crypto.CryptoPermission "RC2", 128,
"javax.crypto.spec.RC2ParameterSpec", 128;
permission javax.crypto.CryptoPermission "RC4", 128;
permission javax.crypto.CryptoPermission "RC5", 128,
"javax.crypto.spec.RC5ParameterSpec", *, 12, *;
permission javax.crypto.CryptoPermission "RSA", *;
permission javax.crypto.CryptoPermission *, 128;
};

@@ -1,13 +0,0 @@
// Some countries have import limits on crypto strength, but may allow for
// these exemptions if the exemption mechanism is used.
grant {
// There is no restriction to any algorithms if KeyRecovery is enforced.
permission javax.crypto.CryptoPermission *, "KeyRecovery";
// There is no restriction to any algorithms if KeyEscrow is enforced.
permission javax.crypto.CryptoPermission *, "KeyEscrow";
// There is no restriction to any algorithms if KeyWeakening is enforced.
permission javax.crypto.CryptoPermission *, "KeyWeakening";
};

@@ -1,6 +0,0 @@
// Default US Export policy file.
grant {
// There is no restriction to any algorithms.
permission javax.crypto.CryptoAllPermission;
};

@@ -1,6 +0,0 @@
// Country-specific policy file for countries with no limits on crypto strength.
grant {
// There is no restriction to any algorithms.
permission javax.crypto.CryptoAllPermission;
};

@@ -1,39 +0,0 @@
############################################################
# Sound Configuration File
############################################################
#
# This properties file is used to specify default service
# providers for javax.sound.midi.MidiSystem and
# javax.sound.sampled.AudioSystem.
#
# The following keys are recognized by MidiSystem methods:
#
# javax.sound.midi.Receiver
# javax.sound.midi.Sequencer
# javax.sound.midi.Synthesizer
# javax.sound.midi.Transmitter
#
# The following keys are recognized by AudioSystem methods:
#
# javax.sound.sampled.Clip
# javax.sound.sampled.Port
# javax.sound.sampled.SourceDataLine
# javax.sound.sampled.TargetDataLine
#
# The values specify the full class name of the service
# provider, or the device name.
#
# See the class descriptions for details.
#
# Example 1:
# Use MyDeviceProvider as default for SourceDataLines:
# javax.sound.sampled.SourceDataLine=com.xyz.MyDeviceProvider
#
# Example 2:
# Specify the default Synthesizer by its name "InternalSynth".
# javax.sound.midi.Synthesizer=#InternalSynth
#
# Example 3:
# Specify the default Receiver by provider and name:
# javax.sound.midi.Receiver=com.sun.media.sound.MidiProvider#SunMIDI1
#
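To show how the keys above are consumed, a small sketch (not part of this file; the provider value is a placeholder) where the factory methods return whatever the configured defaults resolve to:

    import javax.sound.midi.MidiSystem;
    import javax.sound.midi.Synthesizer;
    import javax.sound.sampled.AudioSystem;
    import javax.sound.sampled.Mixer;

    public class SoundDefaultsSketch {
        public static void main(String[] args) throws Exception {
            // Programmatic equivalent of a sound.properties entry (placeholder value):
            // System.setProperty("javax.sound.midi.Synthesizer", "#InternalSynth");

            Synthesizer synth = MidiSystem.getSynthesizer();   // resolved via the default provider
            System.out.println("Default synthesizer: " + synth.getDeviceInfo().getName());

            for (Mixer.Info info : AudioSystem.getMixerInfo()) {
                System.out.println("Mixer: " + info.getName());
            }
        }
    }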

@@ -1,588 +0,0 @@
/*
* Copyright (c) 2004, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#ifndef CLASSFILE_CONSTANTS_H
#define CLASSFILE_CONSTANTS_H
#ifdef __cplusplus
extern "C" {
#endif
/* Classfile version number for this information */
#define JVM_CLASSFILE_MAJOR_VERSION 65
#define JVM_CLASSFILE_MINOR_VERSION 0
/* Flags */
enum {
JVM_ACC_PUBLIC = 0x0001,
JVM_ACC_PRIVATE = 0x0002,
JVM_ACC_PROTECTED = 0x0004,
JVM_ACC_STATIC = 0x0008,
JVM_ACC_FINAL = 0x0010,
JVM_ACC_SYNCHRONIZED = 0x0020,
JVM_ACC_SUPER = 0x0020,
JVM_ACC_VOLATILE = 0x0040,
JVM_ACC_BRIDGE = 0x0040,
JVM_ACC_TRANSIENT = 0x0080,
JVM_ACC_VARARGS = 0x0080,
JVM_ACC_NATIVE = 0x0100,
JVM_ACC_INTERFACE = 0x0200,
JVM_ACC_ABSTRACT = 0x0400,
JVM_ACC_STRICT = 0x0800,
JVM_ACC_SYNTHETIC = 0x1000,
JVM_ACC_ANNOTATION = 0x2000,
JVM_ACC_ENUM = 0x4000,
JVM_ACC_MODULE = 0x8000
};
#define JVM_ACC_PUBLIC_BIT 0
#define JVM_ACC_PRIVATE_BIT 1
#define JVM_ACC_PROTECTED_BIT 2
#define JVM_ACC_STATIC_BIT 3
#define JVM_ACC_FINAL_BIT 4
#define JVM_ACC_SYNCHRONIZED_BIT 5
#define JVM_ACC_SUPER_BIT 5
#define JVM_ACC_VOLATILE_BIT 6
#define JVM_ACC_BRIDGE_BIT 6
#define JVM_ACC_TRANSIENT_BIT 7
#define JVM_ACC_VARARGS_BIT 7
#define JVM_ACC_NATIVE_BIT 8
#define JVM_ACC_INTERFACE_BIT 9
#define JVM_ACC_ABSTRACT_BIT 10
#define JVM_ACC_STRICT_BIT 11
#define JVM_ACC_SYNTHETIC_BIT 12
#define JVM_ACC_ANNOTATION_BIT 13
#define JVM_ACC_ENUM_BIT 14
/* Used in newarray instruction. */
enum {
JVM_T_BOOLEAN = 4,
JVM_T_CHAR = 5,
JVM_T_FLOAT = 6,
JVM_T_DOUBLE = 7,
JVM_T_BYTE = 8,
JVM_T_SHORT = 9,
JVM_T_INT = 10,
JVM_T_LONG = 11
};
/* Constant Pool Entries */
enum {
JVM_CONSTANT_Utf8 = 1,
JVM_CONSTANT_Unicode = 2, /* unused */
JVM_CONSTANT_Integer = 3,
JVM_CONSTANT_Float = 4,
JVM_CONSTANT_Long = 5,
JVM_CONSTANT_Double = 6,
JVM_CONSTANT_Class = 7,
JVM_CONSTANT_String = 8,
JVM_CONSTANT_Fieldref = 9,
JVM_CONSTANT_Methodref = 10,
JVM_CONSTANT_InterfaceMethodref = 11,
JVM_CONSTANT_NameAndType = 12,
JVM_CONSTANT_MethodHandle = 15, // JSR 292
JVM_CONSTANT_MethodType = 16, // JSR 292
JVM_CONSTANT_Dynamic = 17,
JVM_CONSTANT_InvokeDynamic = 18,
JVM_CONSTANT_Module = 19,
JVM_CONSTANT_Package = 20,
JVM_CONSTANT_ExternalMax = 20
};
/* JVM_CONSTANT_MethodHandle subtypes */
enum {
JVM_REF_getField = 1,
JVM_REF_getStatic = 2,
JVM_REF_putField = 3,
JVM_REF_putStatic = 4,
JVM_REF_invokeVirtual = 5,
JVM_REF_invokeStatic = 6,
JVM_REF_invokeSpecial = 7,
JVM_REF_newInvokeSpecial = 8,
JVM_REF_invokeInterface = 9
};
/* StackMapTable type item numbers */
enum {
JVM_ITEM_Top = 0,
JVM_ITEM_Integer = 1,
JVM_ITEM_Float = 2,
JVM_ITEM_Double = 3,
JVM_ITEM_Long = 4,
JVM_ITEM_Null = 5,
JVM_ITEM_UninitializedThis = 6,
JVM_ITEM_Object = 7,
JVM_ITEM_Uninitialized = 8
};
/* Type signatures */
enum {
JVM_SIGNATURE_SLASH = '/',
JVM_SIGNATURE_DOT = '.',
JVM_SIGNATURE_SPECIAL = '<',
JVM_SIGNATURE_ENDSPECIAL = '>',
JVM_SIGNATURE_ARRAY = '[',
JVM_SIGNATURE_BYTE = 'B',
JVM_SIGNATURE_CHAR = 'C',
JVM_SIGNATURE_CLASS = 'L',
JVM_SIGNATURE_ENDCLASS = ';',
JVM_SIGNATURE_ENUM = 'E',
JVM_SIGNATURE_FLOAT = 'F',
JVM_SIGNATURE_DOUBLE = 'D',
JVM_SIGNATURE_FUNC = '(',
JVM_SIGNATURE_ENDFUNC = ')',
JVM_SIGNATURE_INT = 'I',
JVM_SIGNATURE_LONG = 'J',
JVM_SIGNATURE_SHORT = 'S',
JVM_SIGNATURE_VOID = 'V',
JVM_SIGNATURE_BOOLEAN = 'Z'
};
/* Opcodes */
enum {
JVM_OPC_nop = 0,
JVM_OPC_aconst_null = 1,
JVM_OPC_iconst_m1 = 2,
JVM_OPC_iconst_0 = 3,
JVM_OPC_iconst_1 = 4,
JVM_OPC_iconst_2 = 5,
JVM_OPC_iconst_3 = 6,
JVM_OPC_iconst_4 = 7,
JVM_OPC_iconst_5 = 8,
JVM_OPC_lconst_0 = 9,
JVM_OPC_lconst_1 = 10,
JVM_OPC_fconst_0 = 11,
JVM_OPC_fconst_1 = 12,
JVM_OPC_fconst_2 = 13,
JVM_OPC_dconst_0 = 14,
JVM_OPC_dconst_1 = 15,
JVM_OPC_bipush = 16,
JVM_OPC_sipush = 17,
JVM_OPC_ldc = 18,
JVM_OPC_ldc_w = 19,
JVM_OPC_ldc2_w = 20,
JVM_OPC_iload = 21,
JVM_OPC_lload = 22,
JVM_OPC_fload = 23,
JVM_OPC_dload = 24,
JVM_OPC_aload = 25,
JVM_OPC_iload_0 = 26,
JVM_OPC_iload_1 = 27,
JVM_OPC_iload_2 = 28,
JVM_OPC_iload_3 = 29,
JVM_OPC_lload_0 = 30,
JVM_OPC_lload_1 = 31,
JVM_OPC_lload_2 = 32,
JVM_OPC_lload_3 = 33,
JVM_OPC_fload_0 = 34,
JVM_OPC_fload_1 = 35,
JVM_OPC_fload_2 = 36,
JVM_OPC_fload_3 = 37,
JVM_OPC_dload_0 = 38,
JVM_OPC_dload_1 = 39,
JVM_OPC_dload_2 = 40,
JVM_OPC_dload_3 = 41,
JVM_OPC_aload_0 = 42,
JVM_OPC_aload_1 = 43,
JVM_OPC_aload_2 = 44,
JVM_OPC_aload_3 = 45,
JVM_OPC_iaload = 46,
JVM_OPC_laload = 47,
JVM_OPC_faload = 48,
JVM_OPC_daload = 49,
JVM_OPC_aaload = 50,
JVM_OPC_baload = 51,
JVM_OPC_caload = 52,
JVM_OPC_saload = 53,
JVM_OPC_istore = 54,
JVM_OPC_lstore = 55,
JVM_OPC_fstore = 56,
JVM_OPC_dstore = 57,
JVM_OPC_astore = 58,
JVM_OPC_istore_0 = 59,
JVM_OPC_istore_1 = 60,
JVM_OPC_istore_2 = 61,
JVM_OPC_istore_3 = 62,
JVM_OPC_lstore_0 = 63,
JVM_OPC_lstore_1 = 64,
JVM_OPC_lstore_2 = 65,
JVM_OPC_lstore_3 = 66,
JVM_OPC_fstore_0 = 67,
JVM_OPC_fstore_1 = 68,
JVM_OPC_fstore_2 = 69,
JVM_OPC_fstore_3 = 70,
JVM_OPC_dstore_0 = 71,
JVM_OPC_dstore_1 = 72,
JVM_OPC_dstore_2 = 73,
JVM_OPC_dstore_3 = 74,
JVM_OPC_astore_0 = 75,
JVM_OPC_astore_1 = 76,
JVM_OPC_astore_2 = 77,
JVM_OPC_astore_3 = 78,
JVM_OPC_iastore = 79,
JVM_OPC_lastore = 80,
JVM_OPC_fastore = 81,
JVM_OPC_dastore = 82,
JVM_OPC_aastore = 83,
JVM_OPC_bastore = 84,
JVM_OPC_castore = 85,
JVM_OPC_sastore = 86,
JVM_OPC_pop = 87,
JVM_OPC_pop2 = 88,
JVM_OPC_dup = 89,
JVM_OPC_dup_x1 = 90,
JVM_OPC_dup_x2 = 91,
JVM_OPC_dup2 = 92,
JVM_OPC_dup2_x1 = 93,
JVM_OPC_dup2_x2 = 94,
JVM_OPC_swap = 95,
JVM_OPC_iadd = 96,
JVM_OPC_ladd = 97,
JVM_OPC_fadd = 98,
JVM_OPC_dadd = 99,
JVM_OPC_isub = 100,
JVM_OPC_lsub = 101,
JVM_OPC_fsub = 102,
JVM_OPC_dsub = 103,
JVM_OPC_imul = 104,
JVM_OPC_lmul = 105,
JVM_OPC_fmul = 106,
JVM_OPC_dmul = 107,
JVM_OPC_idiv = 108,
JVM_OPC_ldiv = 109,
JVM_OPC_fdiv = 110,
JVM_OPC_ddiv = 111,
JVM_OPC_irem = 112,
JVM_OPC_lrem = 113,
JVM_OPC_frem = 114,
JVM_OPC_drem = 115,
JVM_OPC_ineg = 116,
JVM_OPC_lneg = 117,
JVM_OPC_fneg = 118,
JVM_OPC_dneg = 119,
JVM_OPC_ishl = 120,
JVM_OPC_lshl = 121,
JVM_OPC_ishr = 122,
JVM_OPC_lshr = 123,
JVM_OPC_iushr = 124,
JVM_OPC_lushr = 125,
JVM_OPC_iand = 126,
JVM_OPC_land = 127,
JVM_OPC_ior = 128,
JVM_OPC_lor = 129,
JVM_OPC_ixor = 130,
JVM_OPC_lxor = 131,
JVM_OPC_iinc = 132,
JVM_OPC_i2l = 133,
JVM_OPC_i2f = 134,
JVM_OPC_i2d = 135,
JVM_OPC_l2i = 136,
JVM_OPC_l2f = 137,
JVM_OPC_l2d = 138,
JVM_OPC_f2i = 139,
JVM_OPC_f2l = 140,
JVM_OPC_f2d = 141,
JVM_OPC_d2i = 142,
JVM_OPC_d2l = 143,
JVM_OPC_d2f = 144,
JVM_OPC_i2b = 145,
JVM_OPC_i2c = 146,
JVM_OPC_i2s = 147,
JVM_OPC_lcmp = 148,
JVM_OPC_fcmpl = 149,
JVM_OPC_fcmpg = 150,
JVM_OPC_dcmpl = 151,
JVM_OPC_dcmpg = 152,
JVM_OPC_ifeq = 153,
JVM_OPC_ifne = 154,
JVM_OPC_iflt = 155,
JVM_OPC_ifge = 156,
JVM_OPC_ifgt = 157,
JVM_OPC_ifle = 158,
JVM_OPC_if_icmpeq = 159,
JVM_OPC_if_icmpne = 160,
JVM_OPC_if_icmplt = 161,
JVM_OPC_if_icmpge = 162,
JVM_OPC_if_icmpgt = 163,
JVM_OPC_if_icmple = 164,
JVM_OPC_if_acmpeq = 165,
JVM_OPC_if_acmpne = 166,
JVM_OPC_goto = 167,
JVM_OPC_jsr = 168,
JVM_OPC_ret = 169,
JVM_OPC_tableswitch = 170,
JVM_OPC_lookupswitch = 171,
JVM_OPC_ireturn = 172,
JVM_OPC_lreturn = 173,
JVM_OPC_freturn = 174,
JVM_OPC_dreturn = 175,
JVM_OPC_areturn = 176,
JVM_OPC_return = 177,
JVM_OPC_getstatic = 178,
JVM_OPC_putstatic = 179,
JVM_OPC_getfield = 180,
JVM_OPC_putfield = 181,
JVM_OPC_invokevirtual = 182,
JVM_OPC_invokespecial = 183,
JVM_OPC_invokestatic = 184,
JVM_OPC_invokeinterface = 185,
JVM_OPC_invokedynamic = 186,
JVM_OPC_new = 187,
JVM_OPC_newarray = 188,
JVM_OPC_anewarray = 189,
JVM_OPC_arraylength = 190,
JVM_OPC_athrow = 191,
JVM_OPC_checkcast = 192,
JVM_OPC_instanceof = 193,
JVM_OPC_monitorenter = 194,
JVM_OPC_monitorexit = 195,
JVM_OPC_wide = 196,
JVM_OPC_multianewarray = 197,
JVM_OPC_ifnull = 198,
JVM_OPC_ifnonnull = 199,
JVM_OPC_goto_w = 200,
JVM_OPC_jsr_w = 201,
JVM_OPC_MAX = 201
};
/* Opcode length initializer, use with something like:
* unsigned char opcode_length[JVM_OPC_MAX+1] = JVM_OPCODE_LENGTH_INITIALIZER;
*/
#define JVM_OPCODE_LENGTH_INITIALIZER { \
1, /* nop */ \
1, /* aconst_null */ \
1, /* iconst_m1 */ \
1, /* iconst_0 */ \
1, /* iconst_1 */ \
1, /* iconst_2 */ \
1, /* iconst_3 */ \
1, /* iconst_4 */ \
1, /* iconst_5 */ \
1, /* lconst_0 */ \
1, /* lconst_1 */ \
1, /* fconst_0 */ \
1, /* fconst_1 */ \
1, /* fconst_2 */ \
1, /* dconst_0 */ \
1, /* dconst_1 */ \
2, /* bipush */ \
3, /* sipush */ \
2, /* ldc */ \
3, /* ldc_w */ \
3, /* ldc2_w */ \
2, /* iload */ \
2, /* lload */ \
2, /* fload */ \
2, /* dload */ \
2, /* aload */ \
1, /* iload_0 */ \
1, /* iload_1 */ \
1, /* iload_2 */ \
1, /* iload_3 */ \
1, /* lload_0 */ \
1, /* lload_1 */ \
1, /* lload_2 */ \
1, /* lload_3 */ \
1, /* fload_0 */ \
1, /* fload_1 */ \
1, /* fload_2 */ \
1, /* fload_3 */ \
1, /* dload_0 */ \
1, /* dload_1 */ \
1, /* dload_2 */ \
1, /* dload_3 */ \
1, /* aload_0 */ \
1, /* aload_1 */ \
1, /* aload_2 */ \
1, /* aload_3 */ \
1, /* iaload */ \
1, /* laload */ \
1, /* faload */ \
1, /* daload */ \
1, /* aaload */ \
1, /* baload */ \
1, /* caload */ \
1, /* saload */ \
2, /* istore */ \
2, /* lstore */ \
2, /* fstore */ \
2, /* dstore */ \
2, /* astore */ \
1, /* istore_0 */ \
1, /* istore_1 */ \
1, /* istore_2 */ \
1, /* istore_3 */ \
1, /* lstore_0 */ \
1, /* lstore_1 */ \
1, /* lstore_2 */ \
1, /* lstore_3 */ \
1, /* fstore_0 */ \
1, /* fstore_1 */ \
1, /* fstore_2 */ \
1, /* fstore_3 */ \
1, /* dstore_0 */ \
1, /* dstore_1 */ \
1, /* dstore_2 */ \
1, /* dstore_3 */ \
1, /* astore_0 */ \
1, /* astore_1 */ \
1, /* astore_2 */ \
1, /* astore_3 */ \
1, /* iastore */ \
1, /* lastore */ \
1, /* fastore */ \
1, /* dastore */ \
1, /* aastore */ \
1, /* bastore */ \
1, /* castore */ \
1, /* sastore */ \
1, /* pop */ \
1, /* pop2 */ \
1, /* dup */ \
1, /* dup_x1 */ \
1, /* dup_x2 */ \
1, /* dup2 */ \
1, /* dup2_x1 */ \
1, /* dup2_x2 */ \
1, /* swap */ \
1, /* iadd */ \
1, /* ladd */ \
1, /* fadd */ \
1, /* dadd */ \
1, /* isub */ \
1, /* lsub */ \
1, /* fsub */ \
1, /* dsub */ \
1, /* imul */ \
1, /* lmul */ \
1, /* fmul */ \
1, /* dmul */ \
1, /* idiv */ \
1, /* ldiv */ \
1, /* fdiv */ \
1, /* ddiv */ \
1, /* irem */ \
1, /* lrem */ \
1, /* frem */ \
1, /* drem */ \
1, /* ineg */ \
1, /* lneg */ \
1, /* fneg */ \
1, /* dneg */ \
1, /* ishl */ \
1, /* lshl */ \
1, /* ishr */ \
1, /* lshr */ \
1, /* iushr */ \
1, /* lushr */ \
1, /* iand */ \
1, /* land */ \
1, /* ior */ \
1, /* lor */ \
1, /* ixor */ \
1, /* lxor */ \
3, /* iinc */ \
1, /* i2l */ \
1, /* i2f */ \
1, /* i2d */ \
1, /* l2i */ \
1, /* l2f */ \
1, /* l2d */ \
1, /* f2i */ \
1, /* f2l */ \
1, /* f2d */ \
1, /* d2i */ \
1, /* d2l */ \
1, /* d2f */ \
1, /* i2b */ \
1, /* i2c */ \
1, /* i2s */ \
1, /* lcmp */ \
1, /* fcmpl */ \
1, /* fcmpg */ \
1, /* dcmpl */ \
1, /* dcmpg */ \
3, /* ifeq */ \
3, /* ifne */ \
3, /* iflt */ \
3, /* ifge */ \
3, /* ifgt */ \
3, /* ifle */ \
3, /* if_icmpeq */ \
3, /* if_icmpne */ \
3, /* if_icmplt */ \
3, /* if_icmpge */ \
3, /* if_icmpgt */ \
3, /* if_icmple */ \
3, /* if_acmpeq */ \
3, /* if_acmpne */ \
3, /* goto */ \
3, /* jsr */ \
2, /* ret */ \
99, /* tableswitch */ \
99, /* lookupswitch */ \
1, /* ireturn */ \
1, /* lreturn */ \
1, /* freturn */ \
1, /* dreturn */ \
1, /* areturn */ \
1, /* return */ \
3, /* getstatic */ \
3, /* putstatic */ \
3, /* getfield */ \
3, /* putfield */ \
3, /* invokevirtual */ \
3, /* invokespecial */ \
3, /* invokestatic */ \
5, /* invokeinterface */ \
5, /* invokedynamic */ \
3, /* new */ \
2, /* newarray */ \
3, /* anewarray */ \
1, /* arraylength */ \
1, /* athrow */ \
3, /* checkcast */ \
3, /* instanceof */ \
1, /* monitorenter */ \
1, /* monitorexit */ \
0, /* wide */ \
4, /* multianewarray */ \
3, /* ifnull */ \
3, /* ifnonnull */ \
5, /* goto_w */ \
5 /* jsr_w */ \
}
#ifdef __cplusplus
} /* extern "C" */
#endif /* __cplusplus */
#endif /* CLASSFILE_CONSTANTS */
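The two version constants at the top of this header describe the bytes at the start of every class file; a small Java sketch (not part of the header; the file path comes from the command line) that reads and prints them:

    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;

    public class ClassfileVersionSketch {
        public static void main(String[] args) throws IOException {
            // A class file starts with: u4 magic, u2 minor_version, u2 major_version.
            try (DataInputStream in = new DataInputStream(new FileInputStream(args[0]))) {
                int magic = in.readInt();            // 0xCAFEBABE
                int minor = in.readUnsignedShort();  // JVM_CLASSFILE_MINOR_VERSION
                int major = in.readUnsignedShort();  // JVM_CLASSFILE_MAJOR_VERSION (65 = JDK 21 class files)
                System.out.printf("magic=0x%08X major=%d minor=%d%n", magic, major, minor);
            }
        }
    }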

@@ -1,356 +0,0 @@
/*
* Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#ifndef _JAVASOFT_JAWT_H_
#define _JAVASOFT_JAWT_H_
#include "jni.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* AWT native interface.
*
* The AWT native interface allows a native C or C++ application a means
* by which to access native structures in AWT. This is to facilitate moving
* legacy C and C++ applications to Java and to target the needs of the
* developers who need to do their own native rendering to canvases
* for performance or other reasons.
*
* Conversely it also provides mechanisms for an application which already
* has a native window to provide that to AWT for AWT rendering.
*
* Since every platform may be different in its native data structures
* and APIs for windowing systems the application must necessarily
* provided per-platform source and compile and deliver per-platform
* native code to use this API.
*
* These interfaces are not part of the Java SE specification and
* a VM is not required to implement this API. However it is strongly
* recommended that all implementations which support headful AWT
* also support these interfaces.
*
*/
/*
* AWT Native Drawing Surface (JAWT_DrawingSurface).
*
* For each platform, there is a native drawing surface structure. This
* platform-specific structure can be found in jawt_md.h. It is recommended
* that additional platforms follow the same model. It is also recommended
* that VMs on all platforms support the existing structures in jawt_md.h.
*
*******************
* EXAMPLE OF USAGE:
*******************
*
* In Win32, a programmer wishes to access the HWND of a canvas to perform
* native rendering into it. The programmer has declared the paint() method
* for their canvas subclass to be native:
*
*
* MyCanvas.java:
*
* import java.awt.*;
*
* public class MyCanvas extends Canvas {
*
* static {
* System.loadLibrary("mylib");
* }
*
* public native void paint(Graphics g);
* }
*
*
* myfile.c:
*
* #include "jawt_md.h"
* #include <assert.h>
*
* JNIEXPORT void JNICALL
* Java_MyCanvas_paint(JNIEnv* env, jobject canvas, jobject graphics)
* {
* JAWT awt;
* JAWT_DrawingSurface* ds;
* JAWT_DrawingSurfaceInfo* dsi;
* JAWT_Win32DrawingSurfaceInfo* dsi_win;
* jboolean result;
* jint lock;
*
* // Get the AWT. Request version 9 to access features in that release.
* awt.version = JAWT_VERSION_9;
* result = JAWT_GetAWT(env, &awt);
* assert(result != JNI_FALSE);
*
* // Get the drawing surface
* ds = awt.GetDrawingSurface(env, canvas);
* assert(ds != NULL);
*
* // Lock the drawing surface
* lock = ds->Lock(ds);
* assert((lock & JAWT_LOCK_ERROR) == 0);
*
* // Get the drawing surface info
* dsi = ds->GetDrawingSurfaceInfo(ds);
*
* // Get the platform-specific drawing info
* dsi_win = (JAWT_Win32DrawingSurfaceInfo*)dsi->platformInfo;
*
* //////////////////////////////
* // !!! DO PAINTING HERE !!! //
* //////////////////////////////
*
* // Free the drawing surface info
* ds->FreeDrawingSurfaceInfo(dsi);
*
* // Unlock the drawing surface
* ds->Unlock(ds);
*
* // Free the drawing surface
* awt.FreeDrawingSurface(ds);
* }
*
*/
/*
* JAWT_Rectangle
* Structure for a native rectangle.
*/
typedef struct jawt_Rectangle {
jint x;
jint y;
jint width;
jint height;
} JAWT_Rectangle;
struct jawt_DrawingSurface;
/*
* JAWT_DrawingSurfaceInfo
* Structure for containing the underlying drawing information of a component.
*/
typedef struct jawt_DrawingSurfaceInfo {
/*
* Pointer to the platform-specific information. This can be safely
* cast to a JAWT_Win32DrawingSurfaceInfo on Windows or a
* JAWT_X11DrawingSurfaceInfo on Linux and Solaris. On Mac OS X this is a
* pointer to a NSObject that conforms to the JAWT_SurfaceLayers
* protocol. See jawt_md.h for details.
*/
void* platformInfo;
/* Cached pointer to the underlying drawing surface */
struct jawt_DrawingSurface* ds;
/* Bounding rectangle of the drawing surface */
JAWT_Rectangle bounds;
/* Number of rectangles in the clip */
jint clipSize;
/* Clip rectangle array */
JAWT_Rectangle* clip;
} JAWT_DrawingSurfaceInfo;
#define JAWT_LOCK_ERROR 0x00000001
#define JAWT_LOCK_CLIP_CHANGED 0x00000002
#define JAWT_LOCK_BOUNDS_CHANGED 0x00000004
#define JAWT_LOCK_SURFACE_CHANGED 0x00000008
/*
* JAWT_DrawingSurface
* Structure for containing the underlying drawing information of a component.
* All operations on a JAWT_DrawingSurface MUST be performed from the same
* thread as the call to GetDrawingSurface.
*/
typedef struct jawt_DrawingSurface {
/*
* Cached reference to the Java environment of the calling thread.
* If Lock(), Unlock(), GetDrawingSurfaceInfo() or
* FreeDrawingSurfaceInfo() are called from a different thread,
* this data member should be set before calling those functions.
*/
JNIEnv* env;
/* Cached reference to the target object */
jobject target;
/*
* Lock the surface of the target component for native rendering.
* When finished drawing, the surface must be unlocked with
* Unlock(). This function returns a bitmask with one or more of the
* following values:
*
* JAWT_LOCK_ERROR - When an error has occurred and the surface could not
* be locked.
*
* JAWT_LOCK_CLIP_CHANGED - When the clip region has changed.
*
* JAWT_LOCK_BOUNDS_CHANGED - When the bounds of the surface have changed.
*
* JAWT_LOCK_SURFACE_CHANGED - When the surface itself has changed
*/
jint (JNICALL *Lock)
(struct jawt_DrawingSurface* ds);
/*
* Get the drawing surface info.
* The value returned may be cached, but the values may change if
* additional calls to Lock() or Unlock() are made.
* Lock() must be called before this can return a valid value.
* Returns NULL if an error has occurred.
* When finished with the returned value, FreeDrawingSurfaceInfo must be
* called.
*/
JAWT_DrawingSurfaceInfo* (JNICALL *GetDrawingSurfaceInfo)
(struct jawt_DrawingSurface* ds);
/*
* Free the drawing surface info.
*/
void (JNICALL *FreeDrawingSurfaceInfo)
(JAWT_DrawingSurfaceInfo* dsi);
/*
* Unlock the drawing surface of the target component for native rendering.
*/
void (JNICALL *Unlock)
(struct jawt_DrawingSurface* ds);
} JAWT_DrawingSurface;
/*
* JAWT
* Structure for containing native AWT functions.
*/
typedef struct jawt {
/*
* Version of this structure. This must always be set before
* calling JAWT_GetAWT(). It affects the functions returned.
* Must be one of the known pre-defined versions.
*/
jint version;
/*
* Return a drawing surface from a target jobject. This value
* may be cached.
* Returns NULL if an error has occurred.
* Target must be a java.awt.Component (should be a Canvas
* or Window for native rendering).
* FreeDrawingSurface() must be called when finished with the
* returned JAWT_DrawingSurface.
*/
JAWT_DrawingSurface* (JNICALL *GetDrawingSurface)
(JNIEnv* env, jobject target);
/*
* Free the drawing surface allocated in GetDrawingSurface.
*/
void (JNICALL *FreeDrawingSurface)
(JAWT_DrawingSurface* ds);
/*
* Since 1.4
* Locks the entire AWT for synchronization purposes
*/
void (JNICALL *Lock)(JNIEnv* env);
/*
* Since 1.4
* Unlocks the entire AWT for synchronization purposes
*/
void (JNICALL *Unlock)(JNIEnv* env);
/*
* Since 1.4
* Returns a reference to a java.awt.Component from a native
* platform handle. On Windows, this corresponds to an HWND;
* on Solaris and Linux, this is a Drawable. For other platforms,
* see the appropriate machine-dependent header file for a description.
* The reference returned by this function is a local
* reference that is only valid in this environment.
* This function returns a NULL reference if no component could be
* found with matching platform information.
*/
jobject (JNICALL *GetComponent)(JNIEnv* env, void* platformInfo);
/**
* Since 9
* Creates a java.awt.Frame placed in a native container. Container is
* referenced by the native platform handle. For example on Windows this
* corresponds to an HWND. For other platforms, see the appropriate
* machine-dependent header file for a description. The reference returned
* by this function is a local reference that is only valid in this
* environment. This function returns a NULL reference if no frame could be
* created with matching platform information.
*/
jobject (JNICALL *CreateEmbeddedFrame) (JNIEnv *env, void* platformInfo);
/**
* Since 9
* Moves and resizes the embedded frame. The new location of the top-left
* corner is specified by x and y parameters relative to the native parent
* component. The new size is specified by width and height.
*
* The embedded frame should be created by CreateEmbeddedFrame() method, or
* this function will not have any effect.
*
* java.awt.Component.setLocation() and java.awt.Component.setBounds() for
* EmbeddedFrame really don't move it within the native parent. These
* methods always locate the embedded frame at (0, 0) for backward
* compatibility. To allow moving embedded frames this method was
* introduced, and it works just the same way as setLocation() and
* setBounds() for usual, non-embedded components.
*
* Using usual get/setLocation() and get/setBounds() together with this new
* method is not recommended.
*/
void (JNICALL *SetBounds) (JNIEnv *env, jobject embeddedFrame,
jint x, jint y, jint w, jint h);
/**
* Since 9
* Synthesize a native message to activate or deactivate an EmbeddedFrame
* window depending on the value of parameter doActivate, if "true"
* activates the window; otherwise, deactivates the window.
*
* The embedded frame should be created by CreateEmbeddedFrame() method, or
* this function will not have any effect.
*/
void (JNICALL *SynthesizeWindowActivation) (JNIEnv *env,
jobject embeddedFrame, jboolean doActivate);
} JAWT;
/*
* Get the AWT native structure. This function returns JNI_FALSE if
* an error occurs.
*/
_JNI_IMPORT_OR_EXPORT_
jboolean JNICALL JAWT_GetAWT(JNIEnv* env, JAWT* awt);
/*
* Specify one of these constants as the JAWT.version
* Specifying an earlier version will limit the available functions to
* those provided in that earlier version of JAWT.
* See the "Since" note on each API. Methods with no "Since"
* may be presumed to be present in JAWT_VERSION_1_3.
*/
#define JAWT_VERSION_1_3 0x00010003
#define JAWT_VERSION_1_4 0x00010004
#define JAWT_VERSION_1_7 0x00010007
#define JAWT_VERSION_9 0x00090000
#ifdef __cplusplus
} /* extern "C" */
#endif
#endif /* !_JAVASOFT_JAWT_H_ */

@@ -1,276 +0,0 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* Java Debug Wire Protocol Transport Service Provider Interface.
*/
#ifndef JDWPTRANSPORT_H
#define JDWPTRANSPORT_H
#include "jni.h"
enum {
JDWPTRANSPORT_VERSION_1_0 = 0x00010000,
JDWPTRANSPORT_VERSION_1_1 = 0x00010001
};
#ifdef __cplusplus
extern "C" {
#endif
struct jdwpTransportNativeInterface_;
struct _jdwpTransportEnv;
#ifdef __cplusplus
typedef _jdwpTransportEnv jdwpTransportEnv;
#else
typedef const struct jdwpTransportNativeInterface_ *jdwpTransportEnv;
#endif /* __cplusplus */
/*
* Errors. Universal errors with JVMTI/JVMDI equivalents keep the
* values the same.
*/
typedef enum {
JDWPTRANSPORT_ERROR_NONE = 0,
JDWPTRANSPORT_ERROR_ILLEGAL_ARGUMENT = 103,
JDWPTRANSPORT_ERROR_OUT_OF_MEMORY = 110,
JDWPTRANSPORT_ERROR_INTERNAL = 113,
JDWPTRANSPORT_ERROR_ILLEGAL_STATE = 201,
JDWPTRANSPORT_ERROR_IO_ERROR = 202,
JDWPTRANSPORT_ERROR_TIMEOUT = 203,
JDWPTRANSPORT_ERROR_MSG_NOT_AVAILABLE = 204
} jdwpTransportError;
/*
* Structure to define capabilities
*/
typedef struct {
unsigned int can_timeout_attach :1;
unsigned int can_timeout_accept :1;
unsigned int can_timeout_handshake :1;
unsigned int reserved3 :1;
unsigned int reserved4 :1;
unsigned int reserved5 :1;
unsigned int reserved6 :1;
unsigned int reserved7 :1;
unsigned int reserved8 :1;
unsigned int reserved9 :1;
unsigned int reserved10 :1;
unsigned int reserved11 :1;
unsigned int reserved12 :1;
unsigned int reserved13 :1;
unsigned int reserved14 :1;
unsigned int reserved15 :1;
} JDWPTransportCapabilities;
/*
* Structures to define packet layout.
*
* See: http://java.sun.com/j2se/1.5/docs/guide/jpda/jdwp-spec.html
*/
#define JDWP_HEADER_SIZE 11
enum {
/*
* If additional flags are added that apply to jdwpCmdPacket,
* then debugLoop.c: reader() will need to be updated to
* accept more than JDWPTRANSPORT_FLAGS_NONE.
*/
JDWPTRANSPORT_FLAGS_NONE = 0x0,
JDWPTRANSPORT_FLAGS_REPLY = 0x80
};
typedef struct {
jint len;
jint id;
jbyte flags;
jbyte cmdSet;
jbyte cmd;
jbyte *data;
} jdwpCmdPacket;
typedef struct {
jint len;
jint id;
jbyte flags;
jshort errorCode;
jbyte *data;
} jdwpReplyPacket;
typedef struct {
union {
jdwpCmdPacket cmd;
jdwpReplyPacket reply;
} type;
} jdwpPacket;
/*
* JDWP functions called by the transport.
*/
typedef struct jdwpTransportCallback {
void *(*alloc)(jint numBytes); /* Call this for all allocations */
void (*free)(void *buffer); /* Call this for all deallocations */
} jdwpTransportCallback;
typedef jint (JNICALL *jdwpTransport_OnLoad_t)(JavaVM *jvm,
jdwpTransportCallback *callback,
jint version,
jdwpTransportEnv** env);
/*
* JDWP transport configuration from the agent.
*/
typedef struct jdwpTransportConfiguration {
/* Field added in JDWPTRANSPORT_VERSION_1_1: */
const char* allowed_peers; /* Peers allowed for connection */
} jdwpTransportConfiguration;
/* Function Interface */
struct jdwpTransportNativeInterface_ {
/* 1 : RESERVED */
void *reserved1;
/* 2 : Get Capabilities */
jdwpTransportError (JNICALL *GetCapabilities)(jdwpTransportEnv* env,
JDWPTransportCapabilities *capabilities_ptr);
/* 3 : Attach */
jdwpTransportError (JNICALL *Attach)(jdwpTransportEnv* env,
const char* address,
jlong attach_timeout,
jlong handshake_timeout);
/* 4: StartListening */
jdwpTransportError (JNICALL *StartListening)(jdwpTransportEnv* env,
const char* address,
char** actual_address);
/* 5: StopListening */
jdwpTransportError (JNICALL *StopListening)(jdwpTransportEnv* env);
/* 6: Accept */
jdwpTransportError (JNICALL *Accept)(jdwpTransportEnv* env,
jlong accept_timeout,
jlong handshake_timeout);
/* 7: IsOpen */
jboolean (JNICALL *IsOpen)(jdwpTransportEnv* env);
/* 8: Close */
jdwpTransportError (JNICALL *Close)(jdwpTransportEnv* env);
/* 9: ReadPacket */
jdwpTransportError (JNICALL *ReadPacket)(jdwpTransportEnv* env,
jdwpPacket *pkt);
/* 10: Write Packet */
jdwpTransportError (JNICALL *WritePacket)(jdwpTransportEnv* env,
const jdwpPacket* pkt);
/* 11: GetLastError */
jdwpTransportError (JNICALL *GetLastError)(jdwpTransportEnv* env,
char** error);
/* 12: SetTransportConfiguration added in JDWPTRANSPORT_VERSION_1_1 */
jdwpTransportError (JNICALL *SetTransportConfiguration)(jdwpTransportEnv* env,
jdwpTransportConfiguration *config);
};
/*
* Use inlined functions so that C++ code can use syntax such as
* env->Attach("mymachine:5000", 10*1000, 0);
*
* rather than using C's :-
*
* (*env)->Attach(env, "mymachine:5000", 10*1000, 0);
*/
struct _jdwpTransportEnv {
const struct jdwpTransportNativeInterface_ *functions;
#ifdef __cplusplus
jdwpTransportError GetCapabilities(JDWPTransportCapabilities *capabilities_ptr) {
return functions->GetCapabilities(this, capabilities_ptr);
}
jdwpTransportError Attach(const char* address, jlong attach_timeout,
jlong handshake_timeout) {
return functions->Attach(this, address, attach_timeout, handshake_timeout);
}
jdwpTransportError StartListening(const char* address,
char** actual_address) {
return functions->StartListening(this, address, actual_address);
}
jdwpTransportError StopListening(void) {
return functions->StopListening(this);
}
jdwpTransportError Accept(jlong accept_timeout, jlong handshake_timeout) {
return functions->Accept(this, accept_timeout, handshake_timeout);
}
jboolean IsOpen(void) {
return functions->IsOpen(this);
}
jdwpTransportError Close(void) {
return functions->Close(this);
}
jdwpTransportError ReadPacket(jdwpPacket *pkt) {
return functions->ReadPacket(this, pkt);
}
jdwpTransportError WritePacket(const jdwpPacket* pkt) {
return functions->WritePacket(this, pkt);
}
jdwpTransportError GetLastError(char** error) {
return functions->GetLastError(this, error);
}
/* SetTransportConfiguration added in JDWPTRANSPORT_VERSION_1_1 */
jdwpTransportError SetTransportConfiguration(jdwpTransportConfiguration *config) {
return functions->SetTransportConfiguration(this, config);
}
#endif /* __cplusplus */
};
#ifdef __cplusplus
} /* extern "C" */
#endif /* __cplusplus */
#endif /* JDWPTRANSPORT_H */

@@ -1,115 +0,0 @@
/*
* Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This header file defines the data structures sent by the VM
* through the JVMTI CompiledMethodLoad callback function via the
* "void * compile_info" parameter. The memory pointed to by the
* compile_info parameter may not be referenced after returning from
* the CompiledMethodLoad callback. These are VM implementation
* specific data structures that may evolve in future releases. A
* JVMTI agent should interpret a non-NULL compile_info as a pointer
* to a region of memory containing a list of records. In a typical
* usage scenario, a JVMTI agent would cast each record to a
* jvmtiCompiledMethodLoadRecordHeader, a struct that represents
* arbitrary information. This struct contains a kind field to indicate
* the kind of information being passed, and a pointer to the next
* record. If the kind field indicates inlining information, then the
* agent would cast the record to a jvmtiCompiledMethodLoadInlineRecord.
* This record contains an array of PCStackInfo structs, which indicate
* for every pc address what are the methods on the invocation stack.
* The "methods" and "bcis" fields in each PCStackInfo struct specify a
* 1-1 mapping between these inlined methods and their bytecode indices.
* This can be used to derive the proper source lines of the inlined
* methods.
*/
#ifndef _JVMTI_CMLR_H_
#define _JVMTI_CMLR_H_
enum {
JVMTI_CMLR_MAJOR_VERSION_1 = 0x00000001,
JVMTI_CMLR_MINOR_VERSION_0 = 0x00000000,
JVMTI_CMLR_MAJOR_VERSION = 0x00000001,
JVMTI_CMLR_MINOR_VERSION = 0x00000000
/*
* This comment is for the "JDK import from HotSpot" sanity check:
* version: 1.0.0
*/
};
typedef enum {
JVMTI_CMLR_DUMMY = 1,
JVMTI_CMLR_INLINE_INFO = 2
} jvmtiCMLRKind;
/*
* Record that represents arbitrary information passed through JVMTI
* CompiledMethodLoadEvent void pointer.
*/
typedef struct _jvmtiCompiledMethodLoadRecordHeader {
jvmtiCMLRKind kind; /* id for the kind of info passed in the record */
jint majorinfoversion; /* major and minor info version values. Init'ed */
jint minorinfoversion; /* to current version value in jvmtiExport.cpp. */
struct _jvmtiCompiledMethodLoadRecordHeader* next;
} jvmtiCompiledMethodLoadRecordHeader;
/*
* Record that gives information about the methods on the compile-time
* stack at a specific pc address of a compiled method. Each element in
* the methods array maps to same element in the bcis array.
*/
typedef struct _PCStackInfo {
void* pc; /* the pc address for this compiled method */
jint numstackframes; /* number of methods on the stack */
jmethodID* methods; /* array of numstackframes method ids */
jint* bcis; /* array of numstackframes bytecode indices */
} PCStackInfo;
/*
* Record that contains inlining information for each pc address of
* an nmethod.
*/
typedef struct _jvmtiCompiledMethodLoadInlineRecord {
jvmtiCompiledMethodLoadRecordHeader header; /* common header for casting */
jint numpcs; /* number of pc descriptors in this nmethod */
PCStackInfo* pcinfo; /* array of numpcs pc descriptors */
} jvmtiCompiledMethodLoadInlineRecord;
/*
* Dummy record used to test that we can pass records with different
* information through the void pointer provided that they can be cast
* to a jvmtiCompiledMethodLoadRecordHeader.
*/
typedef struct _jvmtiCompiledMethodLoadDummyRecord {
jvmtiCompiledMethodLoadRecordHeader header; /* common header for casting */
char message[50];
} jvmtiCompiledMethodLoadDummyRecord;
#endif

@@ -1,60 +0,0 @@
/*
* Copyright (c) 1999, 2001, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#ifndef _JAVASOFT_JAWT_MD_H_
#define _JAVASOFT_JAWT_MD_H_
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include "jawt.h"
#ifdef __cplusplus
extern "C" {
#endif
/*
* X11-specific declarations for AWT native interface.
* See notes in jawt.h for an example of use.
*/
typedef struct jawt_X11DrawingSurfaceInfo {
Drawable drawable;
Display* display;
VisualID visualID;
Colormap colormapID;
int depth;
/*
* Since 1.4
* Returns a pixel value from a set of RGB values.
* This is useful for paletted color (256 color) modes.
*/
int (JNICALL *GetAWTColor)(JAWT_DrawingSurface* ds,
int r, int g, int b);
} JAWT_X11DrawingSurfaceInfo;
#ifdef __cplusplus
}
#endif
#endif /* !_JAVASOFT_JAWT_MD_H_ */

@@ -1,66 +0,0 @@
/*
* Copyright (c) 1996, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#ifndef _JAVASOFT_JNI_MD_H_
#define _JAVASOFT_JNI_MD_H_
#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
#ifndef JNIEXPORT
#if (defined(__GNUC__) && ((__GNUC__ > 4) || (__GNUC__ == 4) && (__GNUC_MINOR__ > 2))) || __has_attribute(visibility)
#ifdef ARM
#define JNIEXPORT __attribute__((externally_visible,visibility("default")))
#else
#define JNIEXPORT __attribute__((visibility("default")))
#endif
#else
#define JNIEXPORT
#endif
#endif
#if (defined(__GNUC__) && ((__GNUC__ > 4) || (__GNUC__ == 4) && (__GNUC_MINOR__ > 2))) || __has_attribute(visibility)
#ifdef ARM
#define JNIIMPORT __attribute__((externally_visible,visibility("default")))
#else
#define JNIIMPORT __attribute__((visibility("default")))
#endif
#else
#define JNIIMPORT
#endif
#define JNICALL
typedef int jint;
#ifdef _LP64
typedef long jlong;
#else
typedef long long jlong;
#endif
typedef signed char jbyte;
#endif /* !_JAVASOFT_JNI_MD_H_ */
