@@ -1,11 +1,17 @@
+using Microsoft.Extensions.Logging;
+using Npgsql;
 using StellaOps.Authority.Storage.Postgres.Models;
 using StellaOps.Infrastructure.Postgres;
 using StellaOps.Infrastructure.Postgres.Repositories;

 namespace StellaOps.Authority.Storage.Postgres.Repositories;

 /// <summary>
 /// PostgreSQL repository for API key operations.
 /// </summary>
 public sealed class ApiKeyRepository : RepositoryBase<AuthorityDataSource>, IApiKeyRepository
 {
-    public ApiKeyRepository(AuthorityDataSource dataSource) : base(dataSource) { }
+    public ApiKeyRepository(AuthorityDataSource dataSource, ILogger<ApiKeyRepository> logger)
+        : base(dataSource, logger) { }

     public async Task<ApiKeyEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
@@ -14,9 +20,9 @@ public sealed class ApiKeyRepository : RepositoryBase<AuthorityDataSource>, IApi
             FROM authority.api_keys
             WHERE tenant_id = @tenant_id AND id = @id
             """;
-        return await QuerySingleOrDefaultAsync(tenantId, sql, MapApiKey,
-            cmd => { cmd.Parameters.AddWithValue("id", id); },
-            cancellationToken).ConfigureAwait(false);
+        return await QuerySingleOrDefaultAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            MapApiKey, cancellationToken).ConfigureAwait(false);
     }

     public async Task<ApiKeyEntity?> GetByPrefixAsync(string keyPrefix, CancellationToken cancellationToken = default)
@@ -27,9 +33,8 @@ public sealed class ApiKeyRepository : RepositoryBase<AuthorityDataSource>, IApi
             WHERE key_prefix = @key_prefix AND status = 'active'
             """;
         await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
-        await using var command = connection.CreateCommand();
-        command.CommandText = sql;
-        command.Parameters.AddWithValue("key_prefix", keyPrefix);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "key_prefix", keyPrefix);
         await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
         return await reader.ReadAsync(cancellationToken).ConfigureAwait(false) ? MapApiKey(reader) : null;
     }
@@ -42,7 +47,9 @@ public sealed class ApiKeyRepository : RepositoryBase<AuthorityDataSource>, IApi
             WHERE tenant_id = @tenant_id
             ORDER BY created_at DESC
             """;
-        return await QueryAsync(tenantId, sql, MapApiKey, cancellationToken: cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => AddParameter(cmd, "tenant_id", tenantId),
+            MapApiKey, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<ApiKeyEntity>> GetByUserIdAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default)
@@ -53,9 +60,9 @@ public sealed class ApiKeyRepository : RepositoryBase<AuthorityDataSource>, IApi
             WHERE tenant_id = @tenant_id AND user_id = @user_id
             ORDER BY created_at DESC
             """;
-        return await QueryAsync(tenantId, sql, MapApiKey,
-            cmd => { cmd.Parameters.AddWithValue("user_id", userId); },
-            cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "user_id", userId); },
+            MapApiKey, cancellationToken).ConfigureAwait(false);
     }

     public async Task<Guid> CreateAsync(string tenantId, ApiKeyEntity apiKey, CancellationToken cancellationToken = default)
@@ -66,25 +73,28 @@ public sealed class ApiKeyRepository : RepositoryBase<AuthorityDataSource>, IApi
             RETURNING id
             """;
         var id = apiKey.Id == Guid.Empty ? Guid.NewGuid() : apiKey.Id;
-        await ExecuteAsync(tenantId, sql, cmd =>
-        {
-            cmd.Parameters.AddWithValue("id", id);
-            AddNullableParameter(cmd, "user_id", apiKey.UserId);
-            cmd.Parameters.AddWithValue("name", apiKey.Name);
-            cmd.Parameters.AddWithValue("key_hash", apiKey.KeyHash);
-            cmd.Parameters.AddWithValue("key_prefix", apiKey.KeyPrefix);
-            AddArrayParameter(cmd, "scopes", apiKey.Scopes);
-            cmd.Parameters.AddWithValue("status", apiKey.Status);
-            AddNullableParameter(cmd, "expires_at", apiKey.ExpiresAt);
-            AddJsonbParameter(cmd, "metadata", apiKey.Metadata);
-        }, cancellationToken).ConfigureAwait(false);
+        await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "id", id);
+        AddParameter(command, "tenant_id", tenantId);
+        AddParameter(command, "user_id", apiKey.UserId);
+        AddParameter(command, "name", apiKey.Name);
+        AddParameter(command, "key_hash", apiKey.KeyHash);
+        AddParameter(command, "key_prefix", apiKey.KeyPrefix);
+        AddTextArrayParameter(command, "scopes", apiKey.Scopes);
+        AddParameter(command, "status", apiKey.Status);
+        AddParameter(command, "expires_at", apiKey.ExpiresAt);
+        AddJsonbParameter(command, "metadata", apiKey.Metadata);
+        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
         return id;
     }

     public async Task UpdateLastUsedAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
         const string sql = "UPDATE authority.api_keys SET last_used_at = NOW() WHERE tenant_id = @tenant_id AND id = @id";
-        await ExecuteAsync(tenantId, sql, cmd => { cmd.Parameters.AddWithValue("id", id); }, cancellationToken).ConfigureAwait(false);
+        await ExecuteAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            cancellationToken).ConfigureAwait(false);
     }

     public async Task RevokeAsync(string tenantId, Guid id, string revokedBy, CancellationToken cancellationToken = default)
@@ -95,32 +105,35 @@ public sealed class ApiKeyRepository : RepositoryBase<AuthorityDataSource>, IApi
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("id", id);
-            cmd.Parameters.AddWithValue("revoked_by", revokedBy);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "id", id);
+            AddParameter(cmd, "revoked_by", revokedBy);
         }, cancellationToken).ConfigureAwait(false);
     }

     public async Task DeleteAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
         const string sql = "DELETE FROM authority.api_keys WHERE tenant_id = @tenant_id AND id = @id";
-        await ExecuteAsync(tenantId, sql, cmd => { cmd.Parameters.AddWithValue("id", id); }, cancellationToken).ConfigureAwait(false);
+        await ExecuteAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            cancellationToken).ConfigureAwait(false);
     }

-    private static ApiKeyEntity MapApiKey(System.Data.Common.DbDataReader reader) => new()
+    private static ApiKeyEntity MapApiKey(NpgsqlDataReader reader) => new()
     {
         Id = reader.GetGuid(0),
         TenantId = reader.GetString(1),
-        UserId = reader.IsDBNull(2) ? null : reader.GetGuid(2),
+        UserId = GetNullableGuid(reader, 2),
         Name = reader.GetString(3),
         KeyHash = reader.GetString(4),
         KeyPrefix = reader.GetString(5),
         Scopes = reader.IsDBNull(6) ? [] : reader.GetFieldValue<string[]>(6),
         Status = reader.GetString(7),
-        LastUsedAt = reader.IsDBNull(8) ? null : reader.GetFieldValue<DateTimeOffset>(8),
-        ExpiresAt = reader.IsDBNull(9) ? null : reader.GetFieldValue<DateTimeOffset>(9),
+        LastUsedAt = GetNullableDateTimeOffset(reader, 8),
+        ExpiresAt = GetNullableDateTimeOffset(reader, 9),
         Metadata = reader.GetString(10),
         CreatedAt = reader.GetFieldValue<DateTimeOffset>(11),
-        RevokedAt = reader.IsDBNull(12) ? null : reader.GetFieldValue<DateTimeOffset>(12),
-        RevokedBy = reader.IsDBNull(13) ? null : reader.GetString(13)
+        RevokedAt = GetNullableDateTimeOffset(reader, 12),
+        RevokedBy = GetNullableString(reader, 13)
     };
 }
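The pattern in ApiKeyRepository above repeats in every repository in this commit: the tenant id is now bound explicitly inside the parameter callback, the row mapper moves after that callback, and raw Npgsql calls (Parameters.AddWithValue, connection.CreateCommand) are replaced by shared helpers. The base-class surface those call sites imply is roughly the following; this is an illustrative sketch inferred from usage, not the actual StellaOps.Infrastructure.Postgres code, and every member below is an assumption.

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Npgsql;

// Sketch only: inferred from the call sites in this commit, not the real base class.
public abstract class RepositoryBaseSketch<TDataSource>
{
    protected TDataSource DataSource { get; }

    protected RepositoryBaseSketch(TDataSource dataSource, ILogger? logger = null)
        => DataSource = dataSource;

    // Null values map to DBNull, which is why callers no longer need the old
    // AddNullableParameter overloads.
    protected static void AddParameter(NpgsqlCommand command, string name, object? value)
        => command.Parameters.AddWithValue(name, value ?? (object)DBNull.Value);

    // Builds a command over an already-open connection (replaces
    // connection.CreateCommand() followed by command.CommandText = sql).
    protected static NpgsqlCommand CreateCommand(string sql, NpgsqlConnection connection)
        => new(sql, connection);

    // The query helpers now take the parameter callback before the row mapper.
    protected abstract Task<IReadOnlyList<T>> QueryAsync<T>(
        string tenantId,
        string sql,
        Action<NpgsqlCommand> configureParameters,
        Func<NpgsqlDataReader, T> map,
        CancellationToken cancellationToken);

    protected abstract Task<T?> QuerySingleOrDefaultAsync<T>(
        string tenantId,
        string sql,
        Action<NpgsqlCommand> configureParameters,
        Func<NpgsqlDataReader, T> map,
        CancellationToken cancellationToken);

    protected abstract Task ExecuteAsync(
        string tenantId,
        string sql,
        Action<NpgsqlCommand> configureParameters,
        CancellationToken cancellationToken);
}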
@@ -1,11 +1,17 @@
+using Microsoft.Extensions.Logging;
+using Npgsql;
 using StellaOps.Authority.Storage.Postgres.Models;
 using StellaOps.Infrastructure.Postgres;
 using StellaOps.Infrastructure.Postgres.Repositories;

 namespace StellaOps.Authority.Storage.Postgres.Repositories;

 /// <summary>
 /// PostgreSQL repository for audit log operations.
 /// </summary>
 public sealed class AuditRepository : RepositoryBase<AuthorityDataSource>, IAuditRepository
 {
-    public AuditRepository(AuthorityDataSource dataSource) : base(dataSource) { }
+    public AuditRepository(AuthorityDataSource dataSource, ILogger<AuditRepository> logger)
+        : base(dataSource, logger) { }

     public async Task<long> CreateAsync(string tenantId, AuditEntity audit, CancellationToken cancellationToken = default)
     {
@@ -14,19 +20,18 @@ public sealed class AuditRepository : RepositoryBase<AuthorityDataSource>, IAudi
             VALUES (@tenant_id, @user_id, @action, @resource_type, @resource_id, @old_value::jsonb, @new_value::jsonb, @ip_address, @user_agent, @correlation_id)
             RETURNING id
             """;
-        await using var connection = await DataSource.OpenConnectionAsync(tenantId, DataSourceRole.Writer, cancellationToken).ConfigureAwait(false);
-        await using var command = connection.CreateCommand();
-        command.CommandText = sql;
-        command.Parameters.AddWithValue("tenant_id", tenantId);
-        AddNullableParameter(command, "user_id", audit.UserId);
-        command.Parameters.AddWithValue("action", audit.Action);
-        command.Parameters.AddWithValue("resource_type", audit.ResourceType);
-        AddNullableParameter(command, "resource_id", audit.ResourceId);
-        AddNullableJsonbParameter(command, "old_value", audit.OldValue);
-        AddNullableJsonbParameter(command, "new_value", audit.NewValue);
-        AddNullableParameter(command, "ip_address", audit.IpAddress);
-        AddNullableParameter(command, "user_agent", audit.UserAgent);
-        AddNullableParameter(command, "correlation_id", audit.CorrelationId);
+        await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "tenant_id", tenantId);
+        AddParameter(command, "user_id", audit.UserId);
+        AddParameter(command, "action", audit.Action);
+        AddParameter(command, "resource_type", audit.ResourceType);
+        AddParameter(command, "resource_id", audit.ResourceId);
+        AddJsonbParameter(command, "old_value", audit.OldValue);
+        AddJsonbParameter(command, "new_value", audit.NewValue);
+        AddParameter(command, "ip_address", audit.IpAddress);
+        AddParameter(command, "user_agent", audit.UserAgent);
+        AddParameter(command, "correlation_id", audit.CorrelationId);
         var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
         return (long)result!;
     }
@@ -40,11 +45,12 @@ public sealed class AuditRepository : RepositoryBase<AuthorityDataSource>, IAudi
             ORDER BY created_at DESC
             LIMIT @limit OFFSET @offset
             """;
-        return await QueryAsync(tenantId, sql, MapAudit, cmd =>
+        return await QueryAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("limit", limit);
-            cmd.Parameters.AddWithValue("offset", offset);
-        }, cancellationToken).ConfigureAwait(false);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "limit", limit);
+            AddParameter(cmd, "offset", offset);
+        }, MapAudit, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<AuditEntity>> GetByUserIdAsync(string tenantId, Guid userId, int limit = 100, CancellationToken cancellationToken = default)
@@ -56,29 +62,31 @@ public sealed class AuditRepository : RepositoryBase<AuthorityDataSource>, IAudi
             ORDER BY created_at DESC
             LIMIT @limit
             """;
-        return await QueryAsync(tenantId, sql, MapAudit, cmd =>
+        return await QueryAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("user_id", userId);
-            cmd.Parameters.AddWithValue("limit", limit);
-        }, cancellationToken).ConfigureAwait(false);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "user_id", userId);
+            AddParameter(cmd, "limit", limit);
+        }, MapAudit, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<AuditEntity>> GetByResourceAsync(string tenantId, string resourceType, string? resourceId, int limit = 100, CancellationToken cancellationToken = default)
     {
-        var sql = $"""
+        var sql = """
             SELECT id, tenant_id, user_id, action, resource_type, resource_id, old_value, new_value, ip_address, user_agent, correlation_id, created_at
             FROM authority.audit
             WHERE tenant_id = @tenant_id AND resource_type = @resource_type
-            {(resourceId != null ? "AND resource_id = @resource_id" : "")}
-            ORDER BY created_at DESC
-            LIMIT @limit
             """;
-        return await QueryAsync(tenantId, sql, MapAudit, cmd =>
+        if (resourceId != null) sql += " AND resource_id = @resource_id";
+        sql += " ORDER BY created_at DESC LIMIT @limit";
+
+        return await QueryAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("resource_type", resourceType);
-            if (resourceId != null) cmd.Parameters.AddWithValue("resource_id", resourceId);
-            cmd.Parameters.AddWithValue("limit", limit);
-        }, cancellationToken).ConfigureAwait(false);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "resource_type", resourceType);
+            if (resourceId != null) AddParameter(cmd, "resource_id", resourceId);
+            AddParameter(cmd, "limit", limit);
+        }, MapAudit, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<AuditEntity>> GetByCorrelationIdAsync(string tenantId, string correlationId, CancellationToken cancellationToken = default)
@@ -89,9 +97,11 @@ public sealed class AuditRepository : RepositoryBase<AuthorityDataSource>, IAudi
             WHERE tenant_id = @tenant_id AND correlation_id = @correlation_id
             ORDER BY created_at
             """;
-        return await QueryAsync(tenantId, sql, MapAudit,
-            cmd => { cmd.Parameters.AddWithValue("correlation_id", correlationId); },
-            cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql, cmd =>
+        {
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "correlation_id", correlationId);
+        }, MapAudit, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<AuditEntity>> GetByActionAsync(string tenantId, string action, int limit = 100, CancellationToken cancellationToken = default)
@@ -103,34 +113,27 @@ public sealed class AuditRepository : RepositoryBase<AuthorityDataSource>, IAudi
             ORDER BY created_at DESC
             LIMIT @limit
             """;
-        return await QueryAsync(tenantId, sql, MapAudit, cmd =>
+        return await QueryAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("action", action);
-            cmd.Parameters.AddWithValue("limit", limit);
-        }, cancellationToken).ConfigureAwait(false);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "action", action);
+            AddParameter(cmd, "limit", limit);
+        }, MapAudit, cancellationToken).ConfigureAwait(false);
     }

-    private void AddNullableJsonbParameter(Npgsql.NpgsqlCommand cmd, string name, string? value)
-    {
-        if (value == null)
-            cmd.Parameters.AddWithValue(name, DBNull.Value);
-        else
-            AddJsonbParameter(cmd, name, value);
-    }
-
-    private static AuditEntity MapAudit(System.Data.Common.DbDataReader reader) => new()
+    private static AuditEntity MapAudit(NpgsqlDataReader reader) => new()
     {
         Id = reader.GetInt64(0),
         TenantId = reader.GetString(1),
-        UserId = reader.IsDBNull(2) ? null : reader.GetGuid(2),
+        UserId = GetNullableGuid(reader, 2),
         Action = reader.GetString(3),
         ResourceType = reader.GetString(4),
-        ResourceId = reader.IsDBNull(5) ? null : reader.GetString(5),
-        OldValue = reader.IsDBNull(6) ? null : reader.GetString(6),
-        NewValue = reader.IsDBNull(7) ? null : reader.GetString(7),
-        IpAddress = reader.IsDBNull(8) ? null : reader.GetString(8),
-        UserAgent = reader.IsDBNull(9) ? null : reader.GetString(9),
-        CorrelationId = reader.IsDBNull(10) ? null : reader.GetString(10),
+        ResourceId = GetNullableString(reader, 5),
+        OldValue = GetNullableString(reader, 6),
+        NewValue = GetNullableString(reader, 7),
+        IpAddress = GetNullableString(reader, 8),
+        UserAgent = GetNullableString(reader, 9),
+        CorrelationId = GetNullableString(reader, 10),
         CreatedAt = reader.GetFieldValue<DateTimeOffset>(11)
     };
 }
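AuditRepository also drops its private AddNullableJsonbParameter helper, and the new CreateAsync passes possibly-null values (audit.OldValue, audit.NewValue) straight to AddJsonbParameter, so the shared helper presumably absorbs the null handling itself. A minimal sketch of such a helper, assuming Npgsql's jsonb parameter type; the real StellaOps.Infrastructure.Postgres implementation may differ:

using System;
using Npgsql;
using NpgsqlTypes;

internal static class JsonbParameterSketch
{
    // Assumed behaviour: null maps to DBNull, otherwise the string is sent as a
    // jsonb-typed parameter, matching what the deleted private helper did.
    public static void AddJsonbParameter(NpgsqlCommand command, string name, string? json)
        => command.Parameters.Add(new NpgsqlParameter(name, NpgsqlDbType.Jsonb)
        {
            Value = (object?)json ?? DBNull.Value
        });
}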
@@ -1,11 +1,17 @@
+using Microsoft.Extensions.Logging;
+using Npgsql;
 using StellaOps.Authority.Storage.Postgres.Models;
 using StellaOps.Infrastructure.Postgres;
 using StellaOps.Infrastructure.Postgres.Repositories;

 namespace StellaOps.Authority.Storage.Postgres.Repositories;

 /// <summary>
 /// PostgreSQL repository for permission operations.
 /// </summary>
 public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>, IPermissionRepository
 {
-    public PermissionRepository(AuthorityDataSource dataSource) : base(dataSource) { }
+    public PermissionRepository(AuthorityDataSource dataSource, ILogger<PermissionRepository> logger)
+        : base(dataSource, logger) { }

     public async Task<PermissionEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
@@ -14,9 +20,9 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
             FROM authority.permissions
             WHERE tenant_id = @tenant_id AND id = @id
             """;
-        return await QuerySingleOrDefaultAsync(tenantId, sql, MapPermission,
-            cmd => { cmd.Parameters.AddWithValue("id", id); },
-            cancellationToken).ConfigureAwait(false);
+        return await QuerySingleOrDefaultAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            MapPermission, cancellationToken).ConfigureAwait(false);
     }

     public async Task<PermissionEntity?> GetByNameAsync(string tenantId, string name, CancellationToken cancellationToken = default)
@@ -26,9 +32,9 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
             FROM authority.permissions
             WHERE tenant_id = @tenant_id AND name = @name
             """;
-        return await QuerySingleOrDefaultAsync(tenantId, sql, MapPermission,
-            cmd => { cmd.Parameters.AddWithValue("name", name); },
-            cancellationToken).ConfigureAwait(false);
+        return await QuerySingleOrDefaultAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "name", name); },
+            MapPermission, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<PermissionEntity>> ListAsync(string tenantId, CancellationToken cancellationToken = default)
@@ -39,7 +45,9 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
             WHERE tenant_id = @tenant_id
             ORDER BY resource, action
             """;
-        return await QueryAsync(tenantId, sql, MapPermission, cancellationToken: cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => AddParameter(cmd, "tenant_id", tenantId),
+            MapPermission, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<PermissionEntity>> GetByResourceAsync(string tenantId, string resource, CancellationToken cancellationToken = default)
@@ -50,9 +58,9 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
             WHERE tenant_id = @tenant_id AND resource = @resource
             ORDER BY action
             """;
-        return await QueryAsync(tenantId, sql, MapPermission,
-            cmd => { cmd.Parameters.AddWithValue("resource", resource); },
-            cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "resource", resource); },
+            MapPermission, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<PermissionEntity>> GetRolePermissionsAsync(string tenantId, Guid roleId, CancellationToken cancellationToken = default)
@@ -64,9 +72,9 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
             WHERE p.tenant_id = @tenant_id AND rp.role_id = @role_id
             ORDER BY p.resource, p.action
             """;
-        return await QueryAsync(tenantId, sql, MapPermission,
-            cmd => { cmd.Parameters.AddWithValue("role_id", roleId); },
-            cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "role_id", roleId); },
+            MapPermission, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<PermissionEntity>> GetUserPermissionsAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default)
@@ -80,9 +88,9 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
             AND (ur.expires_at IS NULL OR ur.expires_at > NOW())
             ORDER BY p.resource, p.action
             """;
-        return await QueryAsync(tenantId, sql, MapPermission,
-            cmd => { cmd.Parameters.AddWithValue("user_id", userId); },
-            cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "user_id", userId); },
+            MapPermission, cancellationToken).ConfigureAwait(false);
     }

     public async Task<Guid> CreateAsync(string tenantId, PermissionEntity permission, CancellationToken cancellationToken = default)
@@ -93,21 +101,24 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
             RETURNING id
             """;
         var id = permission.Id == Guid.Empty ? Guid.NewGuid() : permission.Id;
-        await ExecuteAsync(tenantId, sql, cmd =>
-        {
-            cmd.Parameters.AddWithValue("id", id);
-            cmd.Parameters.AddWithValue("name", permission.Name);
-            cmd.Parameters.AddWithValue("resource", permission.Resource);
-            cmd.Parameters.AddWithValue("action", permission.Action);
-            AddNullableParameter(cmd, "description", permission.Description);
-        }, cancellationToken).ConfigureAwait(false);
+        await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "id", id);
+        AddParameter(command, "tenant_id", tenantId);
+        AddParameter(command, "name", permission.Name);
+        AddParameter(command, "resource", permission.Resource);
+        AddParameter(command, "action", permission.Action);
+        AddParameter(command, "description", permission.Description);
+        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        return id;
     }

     public async Task DeleteAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
         const string sql = "DELETE FROM authority.permissions WHERE tenant_id = @tenant_id AND id = @id";
-        await ExecuteAsync(tenantId, sql, cmd => { cmd.Parameters.AddWithValue("id", id); }, cancellationToken).ConfigureAwait(false);
+        await ExecuteAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            cancellationToken).ConfigureAwait(false);
     }

     public async Task AssignToRoleAsync(string tenantId, Guid roleId, Guid permissionId, CancellationToken cancellationToken = default)
@@ -119,8 +130,8 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("role_id", roleId);
-            cmd.Parameters.AddWithValue("permission_id", permissionId);
+            AddParameter(cmd, "role_id", roleId);
+            AddParameter(cmd, "permission_id", permissionId);
         }, cancellationToken).ConfigureAwait(false);
     }

@@ -129,19 +140,19 @@ public sealed class PermissionRepository : RepositoryBase<AuthorityDataSource>,
         const string sql = "DELETE FROM authority.role_permissions WHERE role_id = @role_id AND permission_id = @permission_id";
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("role_id", roleId);
-            cmd.Parameters.AddWithValue("permission_id", permissionId);
+            AddParameter(cmd, "role_id", roleId);
+            AddParameter(cmd, "permission_id", permissionId);
         }, cancellationToken).ConfigureAwait(false);
     }

-    private static PermissionEntity MapPermission(System.Data.Common.DbDataReader reader) => new()
+    private static PermissionEntity MapPermission(NpgsqlDataReader reader) => new()
     {
         Id = reader.GetGuid(0),
         TenantId = reader.GetString(1),
         Name = reader.GetString(2),
         Resource = reader.GetString(3),
         Action = reader.GetString(4),
-        Description = reader.IsDBNull(5) ? null : reader.GetString(5),
+        Description = GetNullableString(reader, 5),
         CreatedAt = reader.GetFieldValue<DateTimeOffset>(6)
     };
 }
@@ -1,11 +1,17 @@
+using Microsoft.Extensions.Logging;
+using Npgsql;
 using StellaOps.Authority.Storage.Postgres.Models;
 using StellaOps.Infrastructure.Postgres;
 using StellaOps.Infrastructure.Postgres.Repositories;

 namespace StellaOps.Authority.Storage.Postgres.Repositories;

 /// <summary>
 /// PostgreSQL repository for role operations.
 /// </summary>
 public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleRepository
 {
-    public RoleRepository(AuthorityDataSource dataSource) : base(dataSource) { }
+    public RoleRepository(AuthorityDataSource dataSource, ILogger<RoleRepository> logger)
+        : base(dataSource, logger) { }

     public async Task<RoleEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
@@ -14,9 +20,9 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
             FROM authority.roles
             WHERE tenant_id = @tenant_id AND id = @id
             """;
-        return await QuerySingleOrDefaultAsync(tenantId, sql, MapRole,
-            cmd => { cmd.Parameters.AddWithValue("id", id); },
-            cancellationToken).ConfigureAwait(false);
+        return await QuerySingleOrDefaultAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            MapRole, cancellationToken).ConfigureAwait(false);
     }

     public async Task<RoleEntity?> GetByNameAsync(string tenantId, string name, CancellationToken cancellationToken = default)
@@ -26,9 +32,9 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
             FROM authority.roles
             WHERE tenant_id = @tenant_id AND name = @name
             """;
-        return await QuerySingleOrDefaultAsync(tenantId, sql, MapRole,
-            cmd => { cmd.Parameters.AddWithValue("name", name); },
-            cancellationToken).ConfigureAwait(false);
+        return await QuerySingleOrDefaultAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "name", name); },
+            MapRole, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<RoleEntity>> ListAsync(string tenantId, CancellationToken cancellationToken = default)
@@ -39,7 +45,9 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
             WHERE tenant_id = @tenant_id
             ORDER BY name
             """;
-        return await QueryAsync(tenantId, sql, MapRole, cancellationToken: cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => AddParameter(cmd, "tenant_id", tenantId),
+            MapRole, cancellationToken).ConfigureAwait(false);
     }

     public async Task<IReadOnlyList<RoleEntity>> GetUserRolesAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default)
@@ -52,9 +60,9 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
             AND (ur.expires_at IS NULL OR ur.expires_at > NOW())
             ORDER BY r.name
             """;
-        return await QueryAsync(tenantId, sql, MapRole,
-            cmd => { cmd.Parameters.AddWithValue("user_id", userId); },
-            cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "user_id", userId); },
+            MapRole, cancellationToken).ConfigureAwait(false);
     }

     public async Task<Guid> CreateAsync(string tenantId, RoleEntity role, CancellationToken cancellationToken = default)
@@ -65,15 +73,16 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
             RETURNING id
             """;
         var id = role.Id == Guid.Empty ? Guid.NewGuid() : role.Id;
-        await ExecuteAsync(tenantId, sql, cmd =>
-        {
-            cmd.Parameters.AddWithValue("id", id);
-            cmd.Parameters.AddWithValue("name", role.Name);
-            AddNullableParameter(cmd, "display_name", role.DisplayName);
-            AddNullableParameter(cmd, "description", role.Description);
-            cmd.Parameters.AddWithValue("is_system", role.IsSystem);
-            AddJsonbParameter(cmd, "metadata", role.Metadata);
-        }, cancellationToken).ConfigureAwait(false);
+        await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "id", id);
+        AddParameter(command, "tenant_id", tenantId);
+        AddParameter(command, "name", role.Name);
+        AddParameter(command, "display_name", role.DisplayName);
+        AddParameter(command, "description", role.Description);
+        AddParameter(command, "is_system", role.IsSystem);
+        AddJsonbParameter(command, "metadata", role.Metadata);
+        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
         return id;
     }

@@ -87,11 +96,12 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("id", role.Id);
-            cmd.Parameters.AddWithValue("name", role.Name);
-            AddNullableParameter(cmd, "display_name", role.DisplayName);
-            AddNullableParameter(cmd, "description", role.Description);
-            cmd.Parameters.AddWithValue("is_system", role.IsSystem);
+            AddParameter(cmd, "id", role.Id);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "name", role.Name);
+            AddParameter(cmd, "display_name", role.DisplayName);
+            AddParameter(cmd, "description", role.Description);
+            AddParameter(cmd, "is_system", role.IsSystem);
             AddJsonbParameter(cmd, "metadata", role.Metadata);
         }, cancellationToken).ConfigureAwait(false);
     }
@@ -99,7 +109,9 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
     public async Task DeleteAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
         const string sql = "DELETE FROM authority.roles WHERE tenant_id = @tenant_id AND id = @id";
-        await ExecuteAsync(tenantId, sql, cmd => { cmd.Parameters.AddWithValue("id", id); }, cancellationToken).ConfigureAwait(false);
+        await ExecuteAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            cancellationToken).ConfigureAwait(false);
     }

     public async Task AssignToUserAsync(string tenantId, Guid userId, Guid roleId, string? grantedBy, DateTimeOffset? expiresAt, CancellationToken cancellationToken = default)
@@ -112,10 +124,10 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("user_id", userId);
-            cmd.Parameters.AddWithValue("role_id", roleId);
-            AddNullableParameter(cmd, "granted_by", grantedBy);
-            AddNullableParameter(cmd, "expires_at", expiresAt);
+            AddParameter(cmd, "user_id", userId);
+            AddParameter(cmd, "role_id", roleId);
+            AddParameter(cmd, "granted_by", grantedBy);
+            AddParameter(cmd, "expires_at", expiresAt);
         }, cancellationToken).ConfigureAwait(false);
     }

@@ -124,18 +136,18 @@ public sealed class RoleRepository : RepositoryBase<AuthorityDataSource>, IRoleR
         const string sql = "DELETE FROM authority.user_roles WHERE user_id = @user_id AND role_id = @role_id";
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("user_id", userId);
-            cmd.Parameters.AddWithValue("role_id", roleId);
+            AddParameter(cmd, "user_id", userId);
+            AddParameter(cmd, "role_id", roleId);
         }, cancellationToken).ConfigureAwait(false);
     }

-    private static RoleEntity MapRole(System.Data.Common.DbDataReader reader) => new()
+    private static RoleEntity MapRole(NpgsqlDataReader reader) => new()
     {
         Id = reader.GetGuid(0),
         TenantId = reader.GetString(1),
         Name = reader.GetString(2),
-        DisplayName = reader.IsDBNull(3) ? null : reader.GetString(3),
-        Description = reader.IsDBNull(4) ? null : reader.GetString(4),
+        DisplayName = GetNullableString(reader, 3),
+        Description = GetNullableString(reader, 4),
         IsSystem = reader.GetBoolean(5),
         Metadata = reader.GetString(6),
         CreatedAt = reader.GetFieldValue<DateTimeOffset>(7),
@@ -1,11 +1,17 @@
+using Microsoft.Extensions.Logging;
+using Npgsql;
 using StellaOps.Authority.Storage.Postgres.Models;
 using StellaOps.Infrastructure.Postgres;
 using StellaOps.Infrastructure.Postgres.Repositories;

 namespace StellaOps.Authority.Storage.Postgres.Repositories;

 /// <summary>
 /// PostgreSQL repository for session operations.
 /// </summary>
 public sealed class SessionRepository : RepositoryBase<AuthorityDataSource>, ISessionRepository
 {
-    public SessionRepository(AuthorityDataSource dataSource) : base(dataSource) { }
+    public SessionRepository(AuthorityDataSource dataSource, ILogger<SessionRepository> logger)
+        : base(dataSource, logger) { }

     public async Task<SessionEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
@@ -14,9 +20,9 @@ public sealed class SessionRepository : RepositoryBase<AuthorityDataSource>, ISe
             FROM authority.sessions
             WHERE tenant_id = @tenant_id AND id = @id
             """;
-        return await QuerySingleOrDefaultAsync(tenantId, sql, MapSession,
-            cmd => { cmd.Parameters.AddWithValue("id", id); },
-            cancellationToken).ConfigureAwait(false);
+        return await QuerySingleOrDefaultAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            MapSession, cancellationToken).ConfigureAwait(false);
     }

     public async Task<SessionEntity?> GetByTokenHashAsync(string sessionTokenHash, CancellationToken cancellationToken = default)
@@ -27,25 +33,25 @@ public sealed class SessionRepository : RepositoryBase<AuthorityDataSource>, ISe
             WHERE session_token_hash = @session_token_hash AND ended_at IS NULL AND expires_at > NOW()
             """;
         await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
-        await using var command = connection.CreateCommand();
-        command.CommandText = sql;
-        command.Parameters.AddWithValue("session_token_hash", sessionTokenHash);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "session_token_hash", sessionTokenHash);
         await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
         return await reader.ReadAsync(cancellationToken).ConfigureAwait(false) ? MapSession(reader) : null;
     }

     public async Task<IReadOnlyList<SessionEntity>> GetByUserIdAsync(string tenantId, Guid userId, bool activeOnly = true, CancellationToken cancellationToken = default)
     {
-        var sql = $"""
+        var sql = """
             SELECT id, tenant_id, user_id, session_token_hash, ip_address, user_agent, started_at, last_activity_at, expires_at, ended_at, end_reason, metadata
             FROM authority.sessions
             WHERE tenant_id = @tenant_id AND user_id = @user_id
-            {(activeOnly ? "AND ended_at IS NULL AND expires_at > NOW()" : "")}
-            ORDER BY started_at DESC
             """;
-        return await QueryAsync(tenantId, sql, MapSession,
-            cmd => { cmd.Parameters.AddWithValue("user_id", userId); },
-            cancellationToken).ConfigureAwait(false);
+        if (activeOnly) sql += " AND ended_at IS NULL AND expires_at > NOW()";
+        sql += " ORDER BY started_at DESC";
+
+        return await QueryAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "user_id", userId); },
+            MapSession, cancellationToken).ConfigureAwait(false);
     }

     public async Task<Guid> CreateAsync(string tenantId, SessionEntity session, CancellationToken cancellationToken = default)
@@ -56,23 +62,26 @@ public sealed class SessionRepository : RepositoryBase<AuthorityDataSource>, ISe
             RETURNING id
             """;
         var id = session.Id == Guid.Empty ? Guid.NewGuid() : session.Id;
-        await ExecuteAsync(tenantId, sql, cmd =>
-        {
-            cmd.Parameters.AddWithValue("id", id);
-            cmd.Parameters.AddWithValue("user_id", session.UserId);
-            cmd.Parameters.AddWithValue("session_token_hash", session.SessionTokenHash);
-            AddNullableParameter(cmd, "ip_address", session.IpAddress);
-            AddNullableParameter(cmd, "user_agent", session.UserAgent);
-            cmd.Parameters.AddWithValue("expires_at", session.ExpiresAt);
-            AddJsonbParameter(cmd, "metadata", session.Metadata);
-        }, cancellationToken).ConfigureAwait(false);
+        await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "id", id);
+        AddParameter(command, "tenant_id", tenantId);
+        AddParameter(command, "user_id", session.UserId);
+        AddParameter(command, "session_token_hash", session.SessionTokenHash);
+        AddParameter(command, "ip_address", session.IpAddress);
+        AddParameter(command, "user_agent", session.UserAgent);
+        AddParameter(command, "expires_at", session.ExpiresAt);
+        AddJsonbParameter(command, "metadata", session.Metadata);
+        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
         return id;
     }

     public async Task UpdateLastActivityAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
         const string sql = "UPDATE authority.sessions SET last_activity_at = NOW() WHERE tenant_id = @tenant_id AND id = @id AND ended_at IS NULL";
-        await ExecuteAsync(tenantId, sql, cmd => { cmd.Parameters.AddWithValue("id", id); }, cancellationToken).ConfigureAwait(false);
+        await ExecuteAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            cancellationToken).ConfigureAwait(false);
     }

     public async Task EndAsync(string tenantId, Guid id, string reason, CancellationToken cancellationToken = default)
@@ -83,8 +92,9 @@ public sealed class SessionRepository : RepositoryBase<AuthorityDataSource>, ISe
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("id", id);
-            cmd.Parameters.AddWithValue("end_reason", reason);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "id", id);
+            AddParameter(cmd, "end_reason", reason);
         }, cancellationToken).ConfigureAwait(false);
     }

@@ -96,8 +106,9 @@ public sealed class SessionRepository : RepositoryBase<AuthorityDataSource>, ISe
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("user_id", userId);
-            cmd.Parameters.AddWithValue("end_reason", reason);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "user_id", userId);
+            AddParameter(cmd, "end_reason", reason);
         }, cancellationToken).ConfigureAwait(false);
     }

@@ -105,24 +116,23 @@ public sealed class SessionRepository : RepositoryBase<AuthorityDataSource>, ISe
     {
         const string sql = "DELETE FROM authority.sessions WHERE expires_at < NOW() - INTERVAL '30 days'";
         await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
-        await using var command = connection.CreateCommand();
-        command.CommandText = sql;
+        await using var command = CreateCommand(sql, connection);
         await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
     }

-    private static SessionEntity MapSession(System.Data.Common.DbDataReader reader) => new()
+    private static SessionEntity MapSession(NpgsqlDataReader reader) => new()
     {
         Id = reader.GetGuid(0),
         TenantId = reader.GetString(1),
         UserId = reader.GetGuid(2),
         SessionTokenHash = reader.GetString(3),
-        IpAddress = reader.IsDBNull(4) ? null : reader.GetString(4),
-        UserAgent = reader.IsDBNull(5) ? null : reader.GetString(5),
+        IpAddress = GetNullableString(reader, 4),
+        UserAgent = GetNullableString(reader, 5),
         StartedAt = reader.GetFieldValue<DateTimeOffset>(6),
         LastActivityAt = reader.GetFieldValue<DateTimeOffset>(7),
         ExpiresAt = reader.GetFieldValue<DateTimeOffset>(8),
-        EndedAt = reader.IsDBNull(9) ? null : reader.GetFieldValue<DateTimeOffset>(9),
-        EndReason = reader.IsDBNull(10) ? null : reader.GetString(10),
+        EndedAt = GetNullableDateTimeOffset(reader, 9),
+        EndReason = GetNullableString(reader, 10),
         Metadata = reader.GetString(11)
     };
 }
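SessionRepository.GetByUserIdAsync (like AuditRepository.GetByResourceAsync earlier) stops interpolating optional filters into the raw string literal and appends them instead. Purely as an illustration, the two query strings the concatenation can produce are shown below; only fixed filter text is appended, and user input still travels through the @tenant_id and @user_id parameters.

// Illustration only: the SQL produced for both values of activeOnly.
var baseSql = """
    SELECT id, tenant_id, user_id, session_token_hash, ip_address, user_agent, started_at, last_activity_at, expires_at, ended_at, end_reason, metadata
    FROM authority.sessions
    WHERE tenant_id = @tenant_id AND user_id = @user_id
    """;
// activeOnly == true
var activeSql = baseSql + " AND ended_at IS NULL AND expires_at > NOW()" + " ORDER BY started_at DESC";
// activeOnly == false
var allSql = baseSql + " ORDER BY started_at DESC";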
@@ -1,11 +1,17 @@
+using Microsoft.Extensions.Logging;
+using Npgsql;
 using StellaOps.Authority.Storage.Postgres.Models;
 using StellaOps.Infrastructure.Postgres;
 using StellaOps.Infrastructure.Postgres.Repositories;

 namespace StellaOps.Authority.Storage.Postgres.Repositories;

 /// <summary>
 /// PostgreSQL repository for access token operations.
 /// </summary>
 public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, ITokenRepository
 {
-    public TokenRepository(AuthorityDataSource dataSource) : base(dataSource) { }
+    public TokenRepository(AuthorityDataSource dataSource, ILogger<TokenRepository> logger)
+        : base(dataSource, logger) { }

     public async Task<TokenEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
@@ -14,9 +20,9 @@ public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, IToke
             FROM authority.tokens
             WHERE tenant_id = @tenant_id AND id = @id
             """;
-        return await QuerySingleOrDefaultAsync(tenantId, sql, MapToken,
-            cmd => { cmd.Parameters.AddWithValue("id", id); },
-            cancellationToken).ConfigureAwait(false);
+        return await QuerySingleOrDefaultAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            MapToken, cancellationToken).ConfigureAwait(false);
     }

     public async Task<TokenEntity?> GetByHashAsync(string tokenHash, CancellationToken cancellationToken = default)
@@ -27,9 +33,8 @@ public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, IToke
             WHERE token_hash = @token_hash AND revoked_at IS NULL AND expires_at > NOW()
             """;
         await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
-        await using var command = connection.CreateCommand();
-        command.CommandText = sql;
-        command.Parameters.AddWithValue("token_hash", tokenHash);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "token_hash", tokenHash);
         await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
         return await reader.ReadAsync(cancellationToken).ConfigureAwait(false) ? MapToken(reader) : null;
     }
@@ -42,9 +47,9 @@ public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, IToke
             WHERE tenant_id = @tenant_id AND user_id = @user_id AND revoked_at IS NULL
             ORDER BY issued_at DESC
             """;
-        return await QueryAsync(tenantId, sql, MapToken,
-            cmd => { cmd.Parameters.AddWithValue("user_id", userId); },
-            cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "user_id", userId); },
+            MapToken, cancellationToken).ConfigureAwait(false);
     }

     public async Task<Guid> CreateAsync(string tenantId, TokenEntity token, CancellationToken cancellationToken = default)
@@ -55,17 +60,18 @@ public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, IToke
             RETURNING id
             """;
         var id = token.Id == Guid.Empty ? Guid.NewGuid() : token.Id;
-        await ExecuteAsync(tenantId, sql, cmd =>
-        {
-            cmd.Parameters.AddWithValue("id", id);
-            AddNullableParameter(cmd, "user_id", token.UserId);
-            cmd.Parameters.AddWithValue("token_hash", token.TokenHash);
-            cmd.Parameters.AddWithValue("token_type", token.TokenType);
-            AddArrayParameter(cmd, "scopes", token.Scopes);
-            AddNullableParameter(cmd, "client_id", token.ClientId);
-            cmd.Parameters.AddWithValue("expires_at", token.ExpiresAt);
-            AddJsonbParameter(cmd, "metadata", token.Metadata);
-        }, cancellationToken).ConfigureAwait(false);
+        await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "id", id);
+        AddParameter(command, "tenant_id", tenantId);
+        AddParameter(command, "user_id", token.UserId);
+        AddParameter(command, "token_hash", token.TokenHash);
+        AddParameter(command, "token_type", token.TokenType);
+        AddTextArrayParameter(command, "scopes", token.Scopes);
+        AddParameter(command, "client_id", token.ClientId);
+        AddParameter(command, "expires_at", token.ExpiresAt);
+        AddJsonbParameter(command, "metadata", token.Metadata);
+        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
         return id;
     }

@@ -77,8 +83,9 @@ public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, IToke
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("id", id);
-            cmd.Parameters.AddWithValue("revoked_by", revokedBy);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "id", id);
+            AddParameter(cmd, "revoked_by", revokedBy);
         }, cancellationToken).ConfigureAwait(false);
     }

@@ -90,8 +97,9 @@ public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, IToke
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("user_id", userId);
-            cmd.Parameters.AddWithValue("revoked_by", revokedBy);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "user_id", userId);
+            AddParameter(cmd, "revoked_by", revokedBy);
         }, cancellationToken).ConfigureAwait(false);
     }

@@ -99,31 +107,34 @@ public sealed class TokenRepository : RepositoryBase<AuthorityDataSource>, IToke
     {
         const string sql = "DELETE FROM authority.tokens WHERE expires_at < NOW() - INTERVAL '7 days'";
         await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
-        await using var command = connection.CreateCommand();
-        command.CommandText = sql;
+        await using var command = CreateCommand(sql, connection);
         await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
     }

-    private static TokenEntity MapToken(System.Data.Common.DbDataReader reader) => new()
+    private static TokenEntity MapToken(NpgsqlDataReader reader) => new()
     {
         Id = reader.GetGuid(0),
         TenantId = reader.GetString(1),
-        UserId = reader.IsDBNull(2) ? null : reader.GetGuid(2),
+        UserId = GetNullableGuid(reader, 2),
         TokenHash = reader.GetString(3),
         TokenType = reader.GetString(4),
         Scopes = reader.IsDBNull(5) ? [] : reader.GetFieldValue<string[]>(5),
-        ClientId = reader.IsDBNull(6) ? null : reader.GetString(6),
+        ClientId = GetNullableString(reader, 6),
         IssuedAt = reader.GetFieldValue<DateTimeOffset>(7),
         ExpiresAt = reader.GetFieldValue<DateTimeOffset>(8),
-        RevokedAt = reader.IsDBNull(9) ? null : reader.GetFieldValue<DateTimeOffset>(9),
-        RevokedBy = reader.IsDBNull(10) ? null : reader.GetString(10),
+        RevokedAt = GetNullableDateTimeOffset(reader, 9),
+        RevokedBy = GetNullableString(reader, 10),
         Metadata = reader.GetString(11)
     };
 }

+/// <summary>
+/// PostgreSQL repository for refresh token operations.
+/// </summary>
 public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>, IRefreshTokenRepository
 {
-    public RefreshTokenRepository(AuthorityDataSource dataSource) : base(dataSource) { }
+    public RefreshTokenRepository(AuthorityDataSource dataSource, ILogger<RefreshTokenRepository> logger)
+        : base(dataSource, logger) { }

     public async Task<RefreshTokenEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
     {
@@ -132,9 +143,9 @@ public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>
             FROM authority.refresh_tokens
             WHERE tenant_id = @tenant_id AND id = @id
             """;
-        return await QuerySingleOrDefaultAsync(tenantId, sql, MapRefreshToken,
-            cmd => { cmd.Parameters.AddWithValue("id", id); },
-            cancellationToken).ConfigureAwait(false);
+        return await QuerySingleOrDefaultAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "id", id); },
+            MapRefreshToken, cancellationToken).ConfigureAwait(false);
     }

     public async Task<RefreshTokenEntity?> GetByHashAsync(string tokenHash, CancellationToken cancellationToken = default)
@@ -145,9 +156,8 @@ public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>
             WHERE token_hash = @token_hash AND revoked_at IS NULL AND expires_at > NOW()
             """;
         await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
-        await using var command = connection.CreateCommand();
-        command.CommandText = sql;
-        command.Parameters.AddWithValue("token_hash", tokenHash);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "token_hash", tokenHash);
         await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
         return await reader.ReadAsync(cancellationToken).ConfigureAwait(false) ? MapRefreshToken(reader) : null;
     }
@@ -160,9 +170,9 @@ public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>
             WHERE tenant_id = @tenant_id AND user_id = @user_id AND revoked_at IS NULL
             ORDER BY issued_at DESC
             """;
-        return await QueryAsync(tenantId, sql, MapRefreshToken,
-            cmd => { cmd.Parameters.AddWithValue("user_id", userId); },
-            cancellationToken).ConfigureAwait(false);
+        return await QueryAsync(tenantId, sql,
+            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "user_id", userId); },
+            MapRefreshToken, cancellationToken).ConfigureAwait(false);
     }

     public async Task<Guid> CreateAsync(string tenantId, RefreshTokenEntity token, CancellationToken cancellationToken = default)
@@ -173,16 +183,17 @@ public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>
             RETURNING id
             """;
         var id = token.Id == Guid.Empty ? Guid.NewGuid() : token.Id;
-        await ExecuteAsync(tenantId, sql, cmd =>
-        {
-            cmd.Parameters.AddWithValue("id", id);
-            cmd.Parameters.AddWithValue("user_id", token.UserId);
-            cmd.Parameters.AddWithValue("token_hash", token.TokenHash);
-            AddNullableParameter(cmd, "access_token_id", token.AccessTokenId);
-            AddNullableParameter(cmd, "client_id", token.ClientId);
-            cmd.Parameters.AddWithValue("expires_at", token.ExpiresAt);
-            AddJsonbParameter(cmd, "metadata", token.Metadata);
-        }, cancellationToken).ConfigureAwait(false);
+        await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
+        await using var command = CreateCommand(sql, connection);
+        AddParameter(command, "id", id);
+        AddParameter(command, "tenant_id", tenantId);
+        AddParameter(command, "user_id", token.UserId);
+        AddParameter(command, "token_hash", token.TokenHash);
+        AddParameter(command, "access_token_id", token.AccessTokenId);
+        AddParameter(command, "client_id", token.ClientId);
+        AddParameter(command, "expires_at", token.ExpiresAt);
+        AddJsonbParameter(command, "metadata", token.Metadata);
+        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
         return id;
     }

@@ -194,9 +205,10 @@ public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("id", id);
-            cmd.Parameters.AddWithValue("revoked_by", revokedBy);
-            AddNullableParameter(cmd, "replaced_by", replacedBy);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "id", id);
+            AddParameter(cmd, "revoked_by", revokedBy);
+            AddParameter(cmd, "replaced_by", replacedBy);
         }, cancellationToken).ConfigureAwait(false);
     }

@@ -208,8 +220,9 @@ public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>
             """;
         await ExecuteAsync(tenantId, sql, cmd =>
         {
-            cmd.Parameters.AddWithValue("user_id", userId);
-            cmd.Parameters.AddWithValue("revoked_by", revokedBy);
+            AddParameter(cmd, "tenant_id", tenantId);
+            AddParameter(cmd, "user_id", userId);
+            AddParameter(cmd, "revoked_by", revokedBy);
         }, cancellationToken).ConfigureAwait(false);
     }

@@ -217,24 +230,23 @@ public sealed class RefreshTokenRepository : RepositoryBase<AuthorityDataSource>
     {
         const string sql = "DELETE FROM authority.refresh_tokens WHERE expires_at < NOW() - INTERVAL '30 days'";
         await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
-        await using var command = connection.CreateCommand();
-        command.CommandText = sql;
+        await using var command = CreateCommand(sql, connection);
         await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
     }

-    private static RefreshTokenEntity MapRefreshToken(System.Data.Common.DbDataReader reader) => new()
+    private static RefreshTokenEntity MapRefreshToken(NpgsqlDataReader reader) => new()
     {
         Id = reader.GetGuid(0),
         TenantId = reader.GetString(1),
         UserId = reader.GetGuid(2),
         TokenHash = reader.GetString(3),
-        AccessTokenId = reader.IsDBNull(4) ? null : reader.GetGuid(4),
-        ClientId = reader.IsDBNull(5) ? null : reader.GetString(5),
+        AccessTokenId = GetNullableGuid(reader, 4),
+        ClientId = GetNullableString(reader, 5),
         IssuedAt = reader.GetFieldValue<DateTimeOffset>(6),
         ExpiresAt = reader.GetFieldValue<DateTimeOffset>(7),
-        RevokedAt = reader.IsDBNull(8) ? null : reader.GetFieldValue<DateTimeOffset>(8),
-        RevokedBy = reader.IsDBNull(9) ? null : reader.GetString(9),
-        ReplacedBy = reader.IsDBNull(10) ? null : reader.GetGuid(10),
+        RevokedAt = GetNullableDateTimeOffset(reader, 8),
+        RevokedBy = GetNullableString(reader, 9),
+        ReplacedBy = GetNullableGuid(reader, 10),
         Metadata = reader.GetString(11)
     };
 }
@@ -29,6 +29,13 @@ public static class ServiceCollectionExtensions
// Register repositories
services.AddScoped<ITenantRepository, TenantRepository>();
services.AddScoped<IUserRepository, UserRepository>();
services.AddScoped<IRoleRepository, RoleRepository>();
services.AddScoped<IPermissionRepository, PermissionRepository>();
services.AddScoped<ITokenRepository, TokenRepository>();
services.AddScoped<IRefreshTokenRepository, RefreshTokenRepository>();
services.AddScoped<IApiKeyRepository, ApiKeyRepository>();
services.AddScoped<ISessionRepository, SessionRepository>();
services.AddScoped<IAuditRepository, AuditRepository>();

return services;
}
@@ -49,6 +56,13 @@ public static class ServiceCollectionExtensions
// Register repositories
services.AddScoped<ITenantRepository, TenantRepository>();
services.AddScoped<IUserRepository, UserRepository>();
services.AddScoped<IRoleRepository, RoleRepository>();
services.AddScoped<IPermissionRepository, PermissionRepository>();
services.AddScoped<ITokenRepository, TokenRepository>();
services.AddScoped<IRefreshTokenRepository, RefreshTokenRepository>();
services.AddScoped<IApiKeyRepository, ApiKeyRepository>();
services.AddScoped<ISessionRepository, SessionRepository>();
services.AddScoped<IAuditRepository, AuditRepository>();

return services;
}

@@ -0,0 +1,167 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Authority.Storage.Postgres.Tests;

[Collection(AuthorityPostgresCollection.Name)]
public sealed class ApiKeyRepositoryTests : IAsyncLifetime
{
private readonly AuthorityPostgresFixture _fixture;
private readonly ApiKeyRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public ApiKeyRepositoryTests(AuthorityPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new AuthorityDataSource(Options.Create(options), NullLogger<AuthorityDataSource>.Instance);
_repository = new ApiKeyRepository(dataSource, NullLogger<ApiKeyRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task CreateAndGetByPrefix_RoundTripsApiKey()
{
// Arrange
var keyPrefix = "sk_live_" + Guid.NewGuid().ToString("N")[..8];
var apiKey = new ApiKeyEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = Guid.NewGuid(),
Name = "CI/CD Key",
KeyHash = "sha256_key_" + Guid.NewGuid().ToString("N"),
KeyPrefix = keyPrefix,
Scopes = ["scan:read", "scan:write"],
Status = ApiKeyStatus.Active,
ExpiresAt = DateTimeOffset.UtcNow.AddYears(1)
};

// Act
await _repository.CreateAsync(_tenantId, apiKey);
var fetched = await _repository.GetByPrefixAsync(keyPrefix);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(apiKey.Id);
fetched.Name.Should().Be("CI/CD Key");
fetched.Scopes.Should().BeEquivalentTo(["scan:read", "scan:write"]);
}

[Fact]
public async Task GetById_ReturnsApiKey()
{
// Arrange
var apiKey = CreateApiKey(Guid.NewGuid(), "Test Key");
await _repository.CreateAsync(_tenantId, apiKey);

// Act
var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);

// Assert
fetched.Should().NotBeNull();
fetched!.Name.Should().Be("Test Key");
}

[Fact]
public async Task GetByUserId_ReturnsUserApiKeys()
{
// Arrange
var userId = Guid.NewGuid();
var key1 = CreateApiKey(userId, "Key 1");
var key2 = CreateApiKey(userId, "Key 2");
await _repository.CreateAsync(_tenantId, key1);
await _repository.CreateAsync(_tenantId, key2);

// Act
var keys = await _repository.GetByUserIdAsync(_tenantId, userId);

// Assert
keys.Should().HaveCount(2);
}

[Fact]
public async Task List_ReturnsAllKeysForTenant()
{
// Arrange
var key1 = CreateApiKey(Guid.NewGuid(), "Key A");
var key2 = CreateApiKey(Guid.NewGuid(), "Key B");
await _repository.CreateAsync(_tenantId, key1);
await _repository.CreateAsync(_tenantId, key2);

// Act
var keys = await _repository.ListAsync(_tenantId);

// Assert
keys.Should().HaveCount(2);
}

[Fact]
public async Task Revoke_UpdatesStatusAndRevokedFields()
{
// Arrange
var apiKey = CreateApiKey(Guid.NewGuid(), "ToRevoke");
await _repository.CreateAsync(_tenantId, apiKey);

// Act
await _repository.RevokeAsync(_tenantId, apiKey.Id, "security@test.com");
var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);

// Assert
fetched!.Status.Should().Be(ApiKeyStatus.Revoked);
fetched.RevokedAt.Should().NotBeNull();
fetched.RevokedBy.Should().Be("security@test.com");
}

[Fact]
public async Task UpdateLastUsed_SetsLastUsedAt()
{
// Arrange
var apiKey = CreateApiKey(Guid.NewGuid(), "Usage Test");
await _repository.CreateAsync(_tenantId, apiKey);

// Act
await _repository.UpdateLastUsedAsync(_tenantId, apiKey.Id);
var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);

// Assert
fetched!.LastUsedAt.Should().NotBeNull();
fetched.LastUsedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
}

[Fact]
public async Task Delete_RemovesApiKey()
{
// Arrange
var apiKey = CreateApiKey(Guid.NewGuid(), "ToDelete");
await _repository.CreateAsync(_tenantId, apiKey);

// Act
await _repository.DeleteAsync(_tenantId, apiKey.Id);
var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);

// Assert
fetched.Should().BeNull();
}

private ApiKeyEntity CreateApiKey(Guid userId, string name) => new()
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = userId,
Name = name,
KeyHash = $"sha256_{Guid.NewGuid():N}",
KeyPrefix = $"sk_test_{Guid.NewGuid():N}"[..16],
Scopes = ["read"],
Status = ApiKeyStatus.Active,
ExpiresAt = DateTimeOffset.UtcNow.AddYears(1)
};
}
@@ -0,0 +1,192 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Authority.Storage.Postgres.Tests;

[Collection(AuthorityPostgresCollection.Name)]
public sealed class AuditRepositoryTests : IAsyncLifetime
{
private readonly AuthorityPostgresFixture _fixture;
private readonly AuditRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public AuditRepositoryTests(AuthorityPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new AuthorityDataSource(Options.Create(options), NullLogger<AuthorityDataSource>.Instance);
_repository = new AuditRepository(dataSource, NullLogger<AuditRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task Create_ReturnsGeneratedId()
{
// Arrange
var audit = new AuditEntity
{
TenantId = _tenantId,
UserId = Guid.NewGuid(),
Action = "user.login",
ResourceType = "user",
ResourceId = Guid.NewGuid().ToString(),
IpAddress = "192.168.1.1",
UserAgent = "Mozilla/5.0",
CorrelationId = Guid.NewGuid().ToString()
};

// Act
var id = await _repository.CreateAsync(_tenantId, audit);

// Assert
id.Should().BeGreaterThan(0);
}

[Fact]
public async Task List_ReturnsAuditEntriesOrderedByCreatedAtDesc()
{
// Arrange
var audit1 = CreateAudit("action1");
var audit2 = CreateAudit("action2");
await _repository.CreateAsync(_tenantId, audit1);
await Task.Delay(10); // Ensure different timestamps
await _repository.CreateAsync(_tenantId, audit2);

// Act
var audits = await _repository.ListAsync(_tenantId, limit: 10);

// Assert
audits.Should().HaveCount(2);
audits[0].Action.Should().Be("action2"); // Most recent first
}

[Fact]
public async Task GetByUserId_ReturnsUserAudits()
{
// Arrange
var userId = Guid.NewGuid();
var audit = new AuditEntity
{
TenantId = _tenantId,
UserId = userId,
Action = "user.action",
ResourceType = "test"
};
await _repository.CreateAsync(_tenantId, audit);

// Act
var audits = await _repository.GetByUserIdAsync(_tenantId, userId);

// Assert
audits.Should().HaveCount(1);
audits[0].UserId.Should().Be(userId);
}

[Fact]
public async Task GetByResource_ReturnsResourceAudits()
{
// Arrange
var resourceId = Guid.NewGuid().ToString();
var audit = new AuditEntity
{
TenantId = _tenantId,
Action = "resource.update",
ResourceType = "role",
ResourceId = resourceId
};
await _repository.CreateAsync(_tenantId, audit);

// Act
var audits = await _repository.GetByResourceAsync(_tenantId, "role", resourceId);

// Assert
audits.Should().HaveCount(1);
audits[0].ResourceId.Should().Be(resourceId);
}

[Fact]
public async Task GetByCorrelationId_ReturnsCorrelatedAudits()
{
// Arrange
var correlationId = Guid.NewGuid().ToString();
var audit1 = new AuditEntity
{
TenantId = _tenantId,
Action = "step1",
ResourceType = "test",
CorrelationId = correlationId
};
var audit2 = new AuditEntity
{
TenantId = _tenantId,
Action = "step2",
ResourceType = "test",
CorrelationId = correlationId
};
await _repository.CreateAsync(_tenantId, audit1);
await _repository.CreateAsync(_tenantId, audit2);

// Act
var audits = await _repository.GetByCorrelationIdAsync(_tenantId, correlationId);

// Assert
audits.Should().HaveCount(2);
audits.Should().AllSatisfy(a => a.CorrelationId.Should().Be(correlationId));
}

[Fact]
public async Task GetByAction_ReturnsMatchingAudits()
{
// Arrange
await _repository.CreateAsync(_tenantId, CreateAudit("user.login"));
await _repository.CreateAsync(_tenantId, CreateAudit("user.logout"));
await _repository.CreateAsync(_tenantId, CreateAudit("user.login"));

// Act
var audits = await _repository.GetByActionAsync(_tenantId, "user.login");

// Assert
audits.Should().HaveCount(2);
audits.Should().AllSatisfy(a => a.Action.Should().Be("user.login"));
}

[Fact]
public async Task Create_StoresJsonbValues()
{
// Arrange
var audit = new AuditEntity
{
TenantId = _tenantId,
Action = "config.update",
ResourceType = "config",
OldValue = "{\"setting\": \"old\"}",
NewValue = "{\"setting\": \"new\"}"
};

// Act
await _repository.CreateAsync(_tenantId, audit);
var audits = await _repository.GetByActionAsync(_tenantId, "config.update");

// Assert
audits.Should().HaveCount(1);
audits[0].OldValue.Should().Contain("old");
audits[0].NewValue.Should().Contain("new");
}

private AuditEntity CreateAudit(string action) => new()
{
TenantId = _tenantId,
UserId = Guid.NewGuid(),
Action = action,
ResourceType = "test",
ResourceId = Guid.NewGuid().ToString()
};
}
@@ -0,0 +1,133 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Authority.Storage.Postgres.Tests;

[Collection(AuthorityPostgresCollection.Name)]
public sealed class PermissionRepositoryTests : IAsyncLifetime
{
private readonly AuthorityPostgresFixture _fixture;
private readonly PermissionRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public PermissionRepositoryTests(AuthorityPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new AuthorityDataSource(Options.Create(options), NullLogger<AuthorityDataSource>.Instance);
_repository = new PermissionRepository(dataSource, NullLogger<PermissionRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task CreateAndGet_RoundTripsPermission()
{
// Arrange
var permission = new PermissionEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "users:read",
Resource = "users",
Action = "read",
Description = "Read user data"
};

// Act
await _repository.CreateAsync(_tenantId, permission);
var fetched = await _repository.GetByIdAsync(_tenantId, permission.Id);

// Assert
fetched.Should().NotBeNull();
fetched!.Name.Should().Be("users:read");
fetched.Resource.Should().Be("users");
fetched.Action.Should().Be("read");
}

[Fact]
public async Task GetByName_ReturnsCorrectPermission()
{
// Arrange
var permission = new PermissionEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "roles:write",
Resource = "roles",
Action = "write"
};
await _repository.CreateAsync(_tenantId, permission);

// Act
var fetched = await _repository.GetByNameAsync(_tenantId, "roles:write");

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(permission.Id);
}

[Fact]
public async Task List_ReturnsAllPermissionsForTenant()
{
// Arrange
var perm1 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "p1", Resource = "r1", Action = "a1" };
var perm2 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "p2", Resource = "r2", Action = "a2" };
await _repository.CreateAsync(_tenantId, perm1);
await _repository.CreateAsync(_tenantId, perm2);

// Act
var permissions = await _repository.ListAsync(_tenantId);

// Assert
permissions.Should().HaveCount(2);
}

[Fact]
public async Task GetByResource_ReturnsResourcePermissions()
{
// Arrange
var perm1 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "scans:read", Resource = "scans", Action = "read" };
var perm2 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "scans:write", Resource = "scans", Action = "write" };
var perm3 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "users:read", Resource = "users", Action = "read" };
await _repository.CreateAsync(_tenantId, perm1);
await _repository.CreateAsync(_tenantId, perm2);
await _repository.CreateAsync(_tenantId, perm3);

// Act
var permissions = await _repository.GetByResourceAsync(_tenantId, "scans");

// Assert
permissions.Should().HaveCount(2);
permissions.Should().AllSatisfy(p => p.Resource.Should().Be("scans"));
}

[Fact]
public async Task Delete_RemovesPermission()
{
// Arrange
var permission = new PermissionEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "temp:delete",
Resource = "temp",
Action = "delete"
};
await _repository.CreateAsync(_tenantId, permission);

// Act
await _repository.DeleteAsync(_tenantId, permission.Id);
var fetched = await _repository.GetByIdAsync(_tenantId, permission.Id);

// Assert
fetched.Should().BeNull();
}
}
@@ -0,0 +1,148 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Authority.Storage.Postgres.Tests;

[Collection(AuthorityPostgresCollection.Name)]
public sealed class RefreshTokenRepositoryTests : IAsyncLifetime
{
private readonly AuthorityPostgresFixture _fixture;
private readonly RefreshTokenRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public RefreshTokenRepositoryTests(AuthorityPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new AuthorityDataSource(Options.Create(options), NullLogger<AuthorityDataSource>.Instance);
_repository = new RefreshTokenRepository(dataSource, NullLogger<RefreshTokenRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task CreateAndGetByHash_RoundTripsRefreshToken()
{
// Arrange
var token = new RefreshTokenEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = Guid.NewGuid(),
TokenHash = "refresh_hash_" + Guid.NewGuid().ToString("N"),
AccessTokenId = Guid.NewGuid(),
ClientId = "web-app",
IssuedAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow.AddDays(30)
};

// Act
await _repository.CreateAsync(_tenantId, token);
var fetched = await _repository.GetByHashAsync(token.TokenHash);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(token.Id);
fetched.ClientId.Should().Be("web-app");
}

[Fact]
public async Task GetById_ReturnsToken()
{
// Arrange
var token = CreateRefreshToken(Guid.NewGuid());
await _repository.CreateAsync(_tenantId, token);

// Act
var fetched = await _repository.GetByIdAsync(_tenantId, token.Id);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(token.Id);
}

[Fact]
public async Task GetByUserId_ReturnsUserTokens()
{
// Arrange
var userId = Guid.NewGuid();
var token1 = CreateRefreshToken(userId);
var token2 = CreateRefreshToken(userId);
await _repository.CreateAsync(_tenantId, token1);
await _repository.CreateAsync(_tenantId, token2);

// Act
var tokens = await _repository.GetByUserIdAsync(_tenantId, userId);

// Assert
tokens.Should().HaveCount(2);
}

[Fact]
public async Task Revoke_SetsRevokedFields()
{
// Arrange
var token = CreateRefreshToken(Guid.NewGuid());
await _repository.CreateAsync(_tenantId, token);

// Act
await _repository.RevokeAsync(_tenantId, token.Id, "admin@test.com", null);
var fetched = await _repository.GetByHashAsync(token.TokenHash);

// Assert
fetched!.RevokedAt.Should().NotBeNull();
fetched.RevokedBy.Should().Be("admin@test.com");
}

[Fact]
public async Task Revoke_WithReplacedBy_SetsReplacedByField()
{
// Arrange
var token = CreateRefreshToken(Guid.NewGuid());
await _repository.CreateAsync(_tenantId, token);
var newTokenId = Guid.NewGuid();

// Act
await _repository.RevokeAsync(_tenantId, token.Id, "rotation", newTokenId);
var fetched = await _repository.GetByHashAsync(token.TokenHash);

// Assert
fetched!.RevokedAt.Should().NotBeNull();
fetched.ReplacedBy.Should().Be(newTokenId);
}

[Fact]
public async Task RevokeByUserId_RevokesAllUserTokens()
{
// Arrange
var userId = Guid.NewGuid();
var token1 = CreateRefreshToken(userId);
var token2 = CreateRefreshToken(userId);
await _repository.CreateAsync(_tenantId, token1);
await _repository.CreateAsync(_tenantId, token2);

// Act
await _repository.RevokeByUserIdAsync(_tenantId, userId, "security_action");
var tokens = await _repository.GetByUserIdAsync(_tenantId, userId);

// Assert
tokens.Should().AllSatisfy(t => t.RevokedAt.Should().NotBeNull());
}

private RefreshTokenEntity CreateRefreshToken(Guid userId) => new()
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = userId,
TokenHash = $"refresh_{Guid.NewGuid():N}",
IssuedAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow.AddDays(30)
};
}
@@ -0,0 +1,140 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Authority.Storage.Postgres.Tests;

[Collection(AuthorityPostgresCollection.Name)]
public sealed class RoleRepositoryTests : IAsyncLifetime
{
private readonly AuthorityPostgresFixture _fixture;
private readonly RoleRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public RoleRepositoryTests(AuthorityPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new AuthorityDataSource(Options.Create(options), NullLogger<AuthorityDataSource>.Instance);
_repository = new RoleRepository(dataSource, NullLogger<RoleRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task CreateAndGet_RoundTripsRole()
{
// Arrange
var role = new RoleEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "admin",
DisplayName = "Administrator",
Description = "Full system access",
IsSystem = true,
Metadata = "{\"level\": 1}"
};

// Act
await _repository.CreateAsync(_tenantId, role);
var fetched = await _repository.GetByIdAsync(_tenantId, role.Id);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(role.Id);
fetched.Name.Should().Be("admin");
fetched.DisplayName.Should().Be("Administrator");
fetched.IsSystem.Should().BeTrue();
}

[Fact]
public async Task GetByName_ReturnsCorrectRole()
{
// Arrange
var role = new RoleEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "viewer",
DisplayName = "Viewer",
Description = "Read-only access"
};
await _repository.CreateAsync(_tenantId, role);

// Act
var fetched = await _repository.GetByNameAsync(_tenantId, "viewer");

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(role.Id);
}

[Fact]
public async Task List_ReturnsAllRolesForTenant()
{
// Arrange
var role1 = new RoleEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "role1" };
var role2 = new RoleEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "role2" };
await _repository.CreateAsync(_tenantId, role1);
await _repository.CreateAsync(_tenantId, role2);

// Act
var roles = await _repository.ListAsync(_tenantId);

// Assert
roles.Should().HaveCount(2);
roles.Select(r => r.Name).Should().Contain(["role1", "role2"]);
}

[Fact]
public async Task Update_ModifiesRole()
{
// Arrange
var role = new RoleEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "editor",
DisplayName = "Editor"
};
await _repository.CreateAsync(_tenantId, role);

// Act
var updated = new RoleEntity
{
Id = role.Id,
TenantId = _tenantId,
Name = "editor",
DisplayName = "Content Editor",
Description = "Updated description"
};
await _repository.UpdateAsync(_tenantId, updated);
var fetched = await _repository.GetByIdAsync(_tenantId, role.Id);

// Assert
fetched!.DisplayName.Should().Be("Content Editor");
fetched.Description.Should().Be("Updated description");
}

[Fact]
public async Task Delete_RemovesRole()
{
// Arrange
var role = new RoleEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "temp" };
await _repository.CreateAsync(_tenantId, role);

// Act
await _repository.DeleteAsync(_tenantId, role.Id);
var fetched = await _repository.GetByIdAsync(_tenantId, role.Id);

// Assert
fetched.Should().BeNull();
}
}
@@ -0,0 +1,179 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Authority.Storage.Postgres.Tests;

[Collection(AuthorityPostgresCollection.Name)]
public sealed class SessionRepositoryTests : IAsyncLifetime
{
private readonly AuthorityPostgresFixture _fixture;
private readonly SessionRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public SessionRepositoryTests(AuthorityPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new AuthorityDataSource(Options.Create(options), NullLogger<AuthorityDataSource>.Instance);
_repository = new SessionRepository(dataSource, NullLogger<SessionRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task CreateAndGet_RoundTripsSession()
{
// Arrange
var session = new SessionEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = Guid.NewGuid(),
SessionTokenHash = "session_hash_" + Guid.NewGuid().ToString("N"),
IpAddress = "192.168.1.1",
UserAgent = "Mozilla/5.0",
StartedAt = DateTimeOffset.UtcNow,
LastActivityAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow.AddDays(7)
};

// Act
await _repository.CreateAsync(_tenantId, session);
var fetched = await _repository.GetByIdAsync(_tenantId, session.Id);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(session.Id);
fetched.IpAddress.Should().Be("192.168.1.1");
fetched.UserAgent.Should().Be("Mozilla/5.0");
}

[Fact]
public async Task GetByTokenHash_ReturnsSession()
{
// Arrange
var tokenHash = "lookup_hash_" + Guid.NewGuid().ToString("N");
var session = new SessionEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = Guid.NewGuid(),
SessionTokenHash = tokenHash,
StartedAt = DateTimeOffset.UtcNow,
LastActivityAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow.AddDays(7)
};
await _repository.CreateAsync(_tenantId, session);

// Act
var fetched = await _repository.GetByTokenHashAsync(tokenHash);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(session.Id);
}

[Fact]
public async Task GetByUserId_WithActiveOnly_ReturnsOnlyActiveSessions()
{
// Arrange
var userId = Guid.NewGuid();
var activeSession = CreateSession(userId);
var endedSession = new SessionEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = userId,
SessionTokenHash = "ended_" + Guid.NewGuid().ToString("N"),
StartedAt = DateTimeOffset.UtcNow.AddHours(-2),
LastActivityAt = DateTimeOffset.UtcNow.AddHours(-1),
ExpiresAt = DateTimeOffset.UtcNow.AddDays(7),
EndedAt = DateTimeOffset.UtcNow,
EndReason = "logout"
};

await _repository.CreateAsync(_tenantId, activeSession);
await _repository.CreateAsync(_tenantId, endedSession);

// Act
var activeSessions = await _repository.GetByUserIdAsync(_tenantId, userId, activeOnly: true);
var allSessions = await _repository.GetByUserIdAsync(_tenantId, userId, activeOnly: false);

// Assert
activeSessions.Should().HaveCount(1);
allSessions.Should().HaveCount(2);
}

[Fact]
public async Task UpdateLastActivity_UpdatesTimestamp()
{
// Arrange
var session = CreateSession(Guid.NewGuid());
await _repository.CreateAsync(_tenantId, session);

// Act
await Task.Delay(100); // Ensure time difference
await _repository.UpdateLastActivityAsync(_tenantId, session.Id);
var fetched = await _repository.GetByIdAsync(_tenantId, session.Id);

// Assert
fetched!.LastActivityAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
}

[Fact]
public async Task End_SetsEndFieldsCorrectly()
{
// Arrange
var session = CreateSession(Guid.NewGuid());
await _repository.CreateAsync(_tenantId, session);

// Act
await _repository.EndAsync(_tenantId, session.Id, "session_timeout");
var fetched = await _repository.GetByIdAsync(_tenantId, session.Id);

// Assert
fetched!.EndedAt.Should().NotBeNull();
fetched.EndReason.Should().Be("session_timeout");
}

[Fact]
public async Task EndByUserId_EndsAllUserSessions()
{
// Arrange
var userId = Guid.NewGuid();
var session1 = CreateSession(userId);
var session2 = CreateSession(userId);
await _repository.CreateAsync(_tenantId, session1);
await _repository.CreateAsync(_tenantId, session2);

// Act
await _repository.EndByUserIdAsync(_tenantId, userId, "forced_logout");
var sessions = await _repository.GetByUserIdAsync(_tenantId, userId, activeOnly: false);

// Assert
sessions.Should().HaveCount(2);
sessions.Should().AllSatisfy(s =>
{
s.EndedAt.Should().NotBeNull();
s.EndReason.Should().Be("forced_logout");
});
}

private SessionEntity CreateSession(Guid userId) => new()
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = userId,
SessionTokenHash = $"session_{Guid.NewGuid():N}",
StartedAt = DateTimeOffset.UtcNow,
LastActivityAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow.AddDays(7)
};
}
@@ -0,0 +1,135 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Authority.Storage.Postgres.Tests;

[Collection(AuthorityPostgresCollection.Name)]
public sealed class TokenRepositoryTests : IAsyncLifetime
{
private readonly AuthorityPostgresFixture _fixture;
private readonly TokenRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public TokenRepositoryTests(AuthorityPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new AuthorityDataSource(Options.Create(options), NullLogger<AuthorityDataSource>.Instance);
_repository = new TokenRepository(dataSource, NullLogger<TokenRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task CreateAndGetByHash_RoundTripsToken()
{
// Arrange
var token = new TokenEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = Guid.NewGuid(),
TokenHash = "sha256_hash_" + Guid.NewGuid().ToString("N"),
TokenType = TokenType.Access,
Scopes = ["read", "write"],
ClientId = "web-app",
IssuedAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow.AddHours(1)
};

// Act
await _repository.CreateAsync(_tenantId, token);
var fetched = await _repository.GetByHashAsync(token.TokenHash);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(token.Id);
fetched.TokenType.Should().Be(TokenType.Access);
fetched.Scopes.Should().BeEquivalentTo(["read", "write"]);
}

[Fact]
public async Task GetById_ReturnsToken()
{
// Arrange
var token = CreateToken(Guid.NewGuid());
await _repository.CreateAsync(_tenantId, token);

// Act
var fetched = await _repository.GetByIdAsync(_tenantId, token.Id);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(token.Id);
}

[Fact]
public async Task GetByUserId_ReturnsUserTokens()
{
// Arrange
var userId = Guid.NewGuid();
var token1 = CreateToken(userId);
var token2 = CreateToken(userId);
await _repository.CreateAsync(_tenantId, token1);
await _repository.CreateAsync(_tenantId, token2);

// Act
var tokens = await _repository.GetByUserIdAsync(_tenantId, userId);

// Assert
tokens.Should().HaveCount(2);
}

[Fact]
public async Task Revoke_SetsRevokedFields()
{
// Arrange
var token = CreateToken(Guid.NewGuid());
await _repository.CreateAsync(_tenantId, token);

// Act
await _repository.RevokeAsync(_tenantId, token.Id, "admin@test.com");
var fetched = await _repository.GetByHashAsync(token.TokenHash);

// Assert
fetched!.RevokedAt.Should().NotBeNull();
fetched.RevokedBy.Should().Be("admin@test.com");
}

[Fact]
public async Task RevokeByUserId_RevokesAllUserTokens()
{
// Arrange
var userId = Guid.NewGuid();
var token1 = CreateToken(userId);
var token2 = CreateToken(userId);
await _repository.CreateAsync(_tenantId, token1);
await _repository.CreateAsync(_tenantId, token2);

// Act
await _repository.RevokeByUserIdAsync(_tenantId, userId, "security_action");
var tokens = await _repository.GetByUserIdAsync(_tenantId, userId);

// Assert
tokens.Should().AllSatisfy(t => t.RevokedAt.Should().NotBeNull());
}

private TokenEntity CreateToken(Guid userId) => new()
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
UserId = userId,
TokenHash = $"sha256_{Guid.NewGuid():N}",
TokenType = TokenType.Access,
Scopes = ["read"],
IssuedAt = DateTimeOffset.UtcNow,
ExpiresAt = DateTimeOffset.UtcNow.AddHours(1)
};
}
@@ -29,6 +29,17 @@ public static class ServiceCollectionExtensions
// Register repositories
services.AddScoped<IChannelRepository, ChannelRepository>();
services.AddScoped<IDeliveryRepository, DeliveryRepository>();
services.AddScoped<IRuleRepository, RuleRepository>();
services.AddScoped<ITemplateRepository, TemplateRepository>();
services.AddScoped<IDigestRepository, DigestRepository>();
services.AddScoped<IQuietHoursRepository, QuietHoursRepository>();
services.AddScoped<IMaintenanceWindowRepository, MaintenanceWindowRepository>();
services.AddScoped<IEscalationPolicyRepository, EscalationPolicyRepository>();
services.AddScoped<IEscalationStateRepository, EscalationStateRepository>();
services.AddScoped<IOnCallScheduleRepository, OnCallScheduleRepository>();
services.AddScoped<IInboxRepository, InboxRepository>();
services.AddScoped<IIncidentRepository, IncidentRepository>();
services.AddScoped<INotifyAuditRepository, NotifyAuditRepository>();

return services;
}
@@ -49,6 +60,17 @@ public static class ServiceCollectionExtensions
// Register repositories
services.AddScoped<IChannelRepository, ChannelRepository>();
services.AddScoped<IDeliveryRepository, DeliveryRepository>();
services.AddScoped<IRuleRepository, RuleRepository>();
services.AddScoped<ITemplateRepository, TemplateRepository>();
services.AddScoped<IDigestRepository, DigestRepository>();
services.AddScoped<IQuietHoursRepository, QuietHoursRepository>();
services.AddScoped<IMaintenanceWindowRepository, MaintenanceWindowRepository>();
services.AddScoped<IEscalationPolicyRepository, EscalationPolicyRepository>();
services.AddScoped<IEscalationStateRepository, EscalationStateRepository>();
services.AddScoped<IOnCallScheduleRepository, OnCallScheduleRepository>();
services.AddScoped<IInboxRepository, InboxRepository>();
services.AddScoped<IIncidentRepository, IncidentRepository>();
services.AddScoped<INotifyAuditRepository, NotifyAuditRepository>();

return services;
}

@@ -0,0 +1,204 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Notify.Storage.Postgres.Models;
using StellaOps.Notify.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Notify.Storage.Postgres.Tests;

[Collection(NotifyPostgresCollection.Name)]
public sealed class ChannelRepositoryTests : IAsyncLifetime
{
private readonly NotifyPostgresFixture _fixture;
private readonly ChannelRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public ChannelRepositoryTests(NotifyPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
_repository = new ChannelRepository(dataSource, NullLogger<ChannelRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task CreateAndGetById_RoundTripsChannel()
{
// Arrange
var channel = new ChannelEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "email-primary",
ChannelType = ChannelType.Email,
Enabled = true,
Config = "{\"smtpHost\": \"smtp.example.com\"}"
};

// Act
await _repository.CreateAsync(channel);
var fetched = await _repository.GetByIdAsync(_tenantId, channel.Id);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(channel.Id);
fetched.Name.Should().Be("email-primary");
fetched.ChannelType.Should().Be(ChannelType.Email);
}

[Fact]
public async Task GetByName_ReturnsCorrectChannel()
{
// Arrange
var channel = CreateChannel("slack-alerts", ChannelType.Slack);
await _repository.CreateAsync(channel);

// Act
var fetched = await _repository.GetByNameAsync(_tenantId, "slack-alerts");

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(channel.Id);
}

[Fact]
public async Task GetAll_ReturnsAllChannelsForTenant()
{
// Arrange
var channel1 = CreateChannel("channel1", ChannelType.Email);
var channel2 = CreateChannel("channel2", ChannelType.Slack);
await _repository.CreateAsync(channel1);
await _repository.CreateAsync(channel2);

// Act
var channels = await _repository.GetAllAsync(_tenantId);

// Assert
channels.Should().HaveCount(2);
channels.Select(c => c.Name).Should().Contain(["channel1", "channel2"]);
}

[Fact]
public async Task GetAll_FiltersByEnabled()
{
// Arrange
var enabledChannel = CreateChannel("enabled", ChannelType.Email);
var disabledChannel = new ChannelEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "disabled",
ChannelType = ChannelType.Email,
Enabled = false
};
await _repository.CreateAsync(enabledChannel);
await _repository.CreateAsync(disabledChannel);

// Act
var enabledChannels = await _repository.GetAllAsync(_tenantId, enabled: true);

// Assert
enabledChannels.Should().HaveCount(1);
enabledChannels[0].Name.Should().Be("enabled");
}

[Fact]
public async Task GetAll_FiltersByChannelType()
{
// Arrange
var emailChannel = CreateChannel("email", ChannelType.Email);
var slackChannel = CreateChannel("slack", ChannelType.Slack);
await _repository.CreateAsync(emailChannel);
await _repository.CreateAsync(slackChannel);

// Act
var slackChannels = await _repository.GetAllAsync(_tenantId, channelType: ChannelType.Slack);

// Assert
slackChannels.Should().HaveCount(1);
slackChannels[0].Name.Should().Be("slack");
}

[Fact]
public async Task Update_ModifiesChannel()
{
// Arrange
var channel = CreateChannel("update-test", ChannelType.Email);
await _repository.CreateAsync(channel);

// Act
var updated = new ChannelEntity
{
Id = channel.Id,
TenantId = _tenantId,
Name = "update-test",
ChannelType = ChannelType.Email,
Enabled = false,
Config = "{\"updated\": true}"
};
var result = await _repository.UpdateAsync(updated);
var fetched = await _repository.GetByIdAsync(_tenantId, channel.Id);

// Assert
result.Should().BeTrue();
fetched!.Enabled.Should().BeFalse();
fetched.Config.Should().Contain("updated");
}

[Fact]
public async Task Delete_RemovesChannel()
{
// Arrange
var channel = CreateChannel("delete-test", ChannelType.Email);
await _repository.CreateAsync(channel);

// Act
var result = await _repository.DeleteAsync(_tenantId, channel.Id);
var fetched = await _repository.GetByIdAsync(_tenantId, channel.Id);

// Assert
result.Should().BeTrue();
fetched.Should().BeNull();
}

[Fact]
public async Task GetEnabledByType_ReturnsOnlyEnabledChannelsOfType()
{
// Arrange
var enabledEmail = CreateChannel("enabled-email", ChannelType.Email);
var disabledEmail = new ChannelEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = "disabled-email",
ChannelType = ChannelType.Email,
Enabled = false
};
var enabledSlack = CreateChannel("enabled-slack", ChannelType.Slack);
await _repository.CreateAsync(enabledEmail);
await _repository.CreateAsync(disabledEmail);
await _repository.CreateAsync(enabledSlack);

// Act
var channels = await _repository.GetEnabledByTypeAsync(_tenantId, ChannelType.Email);

// Assert
channels.Should().HaveCount(1);
channels[0].Name.Should().Be("enabled-email");
}

private ChannelEntity CreateChannel(string name, ChannelType type) => new()
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
Name = name,
ChannelType = type,
Enabled = true
};
}
@@ -0,0 +1,204 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Notify.Storage.Postgres.Models;
using StellaOps.Notify.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Notify.Storage.Postgres.Tests;

[Collection(NotifyPostgresCollection.Name)]
public sealed class DeliveryRepositoryTests : IAsyncLifetime
{
private readonly NotifyPostgresFixture _fixture;
private readonly DeliveryRepository _repository;
private readonly string _tenantId = Guid.NewGuid().ToString();

public DeliveryRepositoryTests(NotifyPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
_repository = new DeliveryRepository(dataSource, NullLogger<DeliveryRepository>.Instance);
}

public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task CreateAndGetById_RoundTripsDelivery()
{
// Arrange
var delivery = CreateDelivery();

// Act
await _repository.CreateAsync(delivery);
var fetched = await _repository.GetByIdAsync(_tenantId, delivery.Id);

// Assert
fetched.Should().NotBeNull();
fetched!.Id.Should().Be(delivery.Id);
fetched.Recipient.Should().Be("user@example.com");
fetched.Status.Should().Be(DeliveryStatus.Pending);
}

[Fact]
public async Task GetPending_ReturnsPendingDeliveries()
{
// Arrange
var pending = CreateDelivery();
await _repository.CreateAsync(pending);

// Act
var pendingDeliveries = await _repository.GetPendingAsync(_tenantId);

// Assert
pendingDeliveries.Should().HaveCount(1);
pendingDeliveries[0].Id.Should().Be(pending.Id);
}

[Fact]
public async Task GetByStatus_ReturnsDeliveriesWithStatus()
{
// Arrange
var delivery = CreateDelivery();
await _repository.CreateAsync(delivery);

// Act
var deliveries = await _repository.GetByStatusAsync(_tenantId, DeliveryStatus.Pending);

// Assert
deliveries.Should().HaveCount(1);
deliveries[0].Status.Should().Be(DeliveryStatus.Pending);
}

[Fact]
public async Task GetByCorrelationId_ReturnsCorrelatedDeliveries()
{
// Arrange
var correlationId = Guid.NewGuid().ToString();
var delivery = new DeliveryEntity
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
ChannelId = Guid.NewGuid(),
Recipient = "user@example.com",
EventType = "scan.completed",
CorrelationId = correlationId
};
await _repository.CreateAsync(delivery);

// Act
var deliveries = await _repository.GetByCorrelationIdAsync(_tenantId, correlationId);

// Assert
deliveries.Should().HaveCount(1);
deliveries[0].CorrelationId.Should().Be(correlationId);
}

[Fact]
public async Task MarkQueued_UpdatesStatus()
{
// Arrange
var delivery = CreateDelivery();
await _repository.CreateAsync(delivery);

// Act
var result = await _repository.MarkQueuedAsync(_tenantId, delivery.Id);
var fetched = await _repository.GetByIdAsync(_tenantId, delivery.Id);

// Assert
result.Should().BeTrue();
fetched!.Status.Should().Be(DeliveryStatus.Queued);
fetched.QueuedAt.Should().NotBeNull();
}

[Fact]
public async Task MarkSent_UpdatesStatusAndExternalId()
{
// Arrange
var delivery = CreateDelivery();
await _repository.CreateAsync(delivery);
await _repository.MarkQueuedAsync(_tenantId, delivery.Id);

// Act
var result = await _repository.MarkSentAsync(_tenantId, delivery.Id, "external-123");
var fetched = await _repository.GetByIdAsync(_tenantId, delivery.Id);

// Assert
result.Should().BeTrue();
fetched!.Status.Should().Be(DeliveryStatus.Sent);
fetched.ExternalId.Should().Be("external-123");
fetched.SentAt.Should().NotBeNull();
}

[Fact]
public async Task MarkDelivered_UpdatesStatus()
{
// Arrange
var delivery = CreateDelivery();
await _repository.CreateAsync(delivery);
await _repository.MarkSentAsync(_tenantId, delivery.Id);

// Act
var result = await _repository.MarkDeliveredAsync(_tenantId, delivery.Id);
var fetched = await _repository.GetByIdAsync(_tenantId, delivery.Id);

// Assert
result.Should().BeTrue();
fetched!.Status.Should().Be(DeliveryStatus.Delivered);
fetched.DeliveredAt.Should().NotBeNull();
}

[Fact]
public async Task MarkFailed_UpdatesStatusAndError()
{
// Arrange
var delivery = CreateDelivery();
await _repository.CreateAsync(delivery);

// Act
var result = await _repository.MarkFailedAsync(_tenantId, delivery.Id, "Connection timeout", TimeSpan.FromMinutes(5));
var fetched = await _repository.GetByIdAsync(_tenantId, delivery.Id);

// Assert
result.Should().BeTrue();
fetched!.Status.Should().Be(DeliveryStatus.Failed);
fetched.ErrorMessage.Should().Be("Connection timeout");
fetched.FailedAt.Should().NotBeNull();
}

[Fact]
public async Task GetStats_ReturnsCorrectCounts()
{
// Arrange
var delivery1 = CreateDelivery();
var delivery2 = CreateDelivery();
await _repository.CreateAsync(delivery1);
await _repository.CreateAsync(delivery2);
await _repository.MarkSentAsync(_tenantId, delivery2.Id);

var from = DateTimeOffset.UtcNow.AddHours(-1);
var to = DateTimeOffset.UtcNow.AddHours(1);

// Act
var stats = await _repository.GetStatsAsync(_tenantId, from, to);

// Assert
stats.Total.Should().Be(2);
stats.Pending.Should().Be(1);
stats.Sent.Should().Be(1);
}

private DeliveryEntity CreateDelivery() => new()
{
Id = Guid.NewGuid(),
TenantId = _tenantId,
ChannelId = Guid.NewGuid(),
Recipient = "user@example.com",
EventType = "scan.completed",
Status = DeliveryStatus.Pending
};
}
@@ -0,0 +1,191 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Notify.Storage.Postgres.Models;
using StellaOps.Notify.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Notify.Storage.Postgres.Tests;

[Collection(NotifyPostgresCollection.Name)]
public sealed class DigestRepositoryTests : IAsyncLifetime
{
    private readonly NotifyPostgresFixture _fixture;
    private readonly DigestRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public DigestRepositoryTests(NotifyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
        _repository = new DigestRepository(dataSource, NullLogger<DigestRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task UpsertAndGetById_RoundTripsDigest()
    {
        // Arrange
        var digest = new DigestEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            ChannelId = Guid.NewGuid(),
            Recipient = "user@example.com",
            DigestKey = "daily-summary",
            EventCount = 0,
            Status = DigestStatus.Collecting,
            CollectUntil = DateTimeOffset.UtcNow.AddHours(1)
        };

        // Act
        await _repository.UpsertAsync(digest);
        var fetched = await _repository.GetByIdAsync(_tenantId, digest.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(digest.Id);
        fetched.DigestKey.Should().Be("daily-summary");
        fetched.Status.Should().Be(DigestStatus.Collecting);
    }

    [Fact]
    public async Task GetByKey_ReturnsCorrectDigest()
    {
        // Arrange
        var channelId = Guid.NewGuid();
        var digest = new DigestEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            ChannelId = channelId,
            Recipient = "user@example.com",
            DigestKey = "weekly-report",
            CollectUntil = DateTimeOffset.UtcNow.AddDays(7)
        };
        await _repository.UpsertAsync(digest);

        // Act
        var fetched = await _repository.GetByKeyAsync(_tenantId, channelId, "user@example.com", "weekly-report");

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(digest.Id);
    }

    [Fact]
    public async Task AddEvent_IncrementsEventCount()
    {
        // Arrange
        var digest = CreateDigest("event-test");
        await _repository.UpsertAsync(digest);

        // Act
        await _repository.AddEventAsync(_tenantId, digest.Id, "{\"type\": \"test\"}");
        await _repository.AddEventAsync(_tenantId, digest.Id, "{\"type\": \"test2\"}");
        var fetched = await _repository.GetByIdAsync(_tenantId, digest.Id);

        // Assert
        fetched!.EventCount.Should().Be(2);
    }

    [Fact]
    public async Task GetReadyToSend_ReturnsDigestsReadyToSend()
    {
        // Arrange - One ready digest (past CollectUntil), one not ready
        var readyDigest = new DigestEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            ChannelId = Guid.NewGuid(),
            Recipient = "ready@example.com",
            DigestKey = "ready",
            Status = DigestStatus.Collecting,
            CollectUntil = DateTimeOffset.UtcNow.AddMinutes(-1)
        };
        var notReadyDigest = new DigestEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            ChannelId = Guid.NewGuid(),
            Recipient = "notready@example.com",
            DigestKey = "notready",
            Status = DigestStatus.Collecting,
            CollectUntil = DateTimeOffset.UtcNow.AddHours(1)
        };
        await _repository.UpsertAsync(readyDigest);
        await _repository.UpsertAsync(notReadyDigest);

        // Act
        var readyDigests = await _repository.GetReadyToSendAsync();

        // Assert
        readyDigests.Should().HaveCount(1);
        readyDigests[0].DigestKey.Should().Be("ready");
    }

    [Fact]
    public async Task MarkSending_UpdatesStatus()
    {
        // Arrange
        var digest = CreateDigest("sending-test");
        await _repository.UpsertAsync(digest);

        // Act
        var result = await _repository.MarkSendingAsync(_tenantId, digest.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, digest.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Status.Should().Be(DigestStatus.Sending);
    }

    [Fact]
    public async Task MarkSent_UpdatesStatusAndSentAt()
    {
        // Arrange
        var digest = CreateDigest("sent-test");
        await _repository.UpsertAsync(digest);
        await _repository.MarkSendingAsync(_tenantId, digest.Id);

        // Act
        var result = await _repository.MarkSentAsync(_tenantId, digest.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, digest.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Status.Should().Be(DigestStatus.Sent);
        fetched.SentAt.Should().NotBeNull();
    }

    [Fact]
    public async Task DeleteOld_RemovesOldDigests()
    {
        // Arrange
        var digest = CreateDigest("old-digest");
        await _repository.UpsertAsync(digest);

        // Act - Delete digests older than future date
        var cutoff = DateTimeOffset.UtcNow.AddMinutes(1);
        var count = await _repository.DeleteOldAsync(cutoff);

        // Assert
        count.Should().Be(1);
    }

    private DigestEntity CreateDigest(string key) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        ChannelId = Guid.NewGuid(),
        Recipient = "user@example.com",
        DigestKey = key,
        Status = DigestStatus.Collecting,
        CollectUntil = DateTimeOffset.UtcNow.AddHours(1)
    };
}
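GetReadyToSendAsync is exercised only through CollectUntil, so the underlying query presumably selects collecting digests whose collection window has elapsed. A hedged sketch of the SQL such an implementation might run, written in the raw-string style the repositories use (table and column names are inferred from the entity, not shown in this diff):

// Hypothetical query shape for GetReadyToSendAsync; names inferred, not confirmed.
const string readyToSendSql = """
    SELECT id, tenant_id, channel_id, recipient, digest_key, event_count, status, collect_until
    FROM notify.digests
    WHERE status = 'collecting' AND collect_until <= now()
    ORDER BY collect_until
    """;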
@@ -0,0 +1,208 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Notify.Storage.Postgres.Models;
using StellaOps.Notify.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Notify.Storage.Postgres.Tests;

[Collection(NotifyPostgresCollection.Name)]
public sealed class InboxRepositoryTests : IAsyncLifetime
{
    private readonly NotifyPostgresFixture _fixture;
    private readonly InboxRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public InboxRepositoryTests(NotifyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
        _repository = new InboxRepository(dataSource, NullLogger<InboxRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetById_RoundTripsInboxItem()
    {
        // Arrange
        var userId = Guid.NewGuid();
        var inbox = new InboxEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            UserId = userId,
            Title = "New Vulnerability Found",
            Body = "Critical vulnerability CVE-2024-1234 detected",
            EventType = "vulnerability.found",
            ActionUrl = "/scans/123/vulnerabilities"
        };

        // Act
        await _repository.CreateAsync(inbox);
        var fetched = await _repository.GetByIdAsync(_tenantId, inbox.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(inbox.Id);
        fetched.Title.Should().Be("New Vulnerability Found");
        fetched.Read.Should().BeFalse();
    }

    [Fact]
    public async Task GetForUser_ReturnsUserInboxItems()
    {
        // Arrange
        var userId = Guid.NewGuid();
        var inbox1 = CreateInbox(userId, "Item 1");
        var inbox2 = CreateInbox(userId, "Item 2");
        var otherUserInbox = CreateInbox(Guid.NewGuid(), "Other user item");
        await _repository.CreateAsync(inbox1);
        await _repository.CreateAsync(inbox2);
        await _repository.CreateAsync(otherUserInbox);

        // Act
        var items = await _repository.GetForUserAsync(_tenantId, userId);

        // Assert
        items.Should().HaveCount(2);
        items.Select(i => i.Title).Should().Contain(["Item 1", "Item 2"]);
    }

    [Fact]
    public async Task GetForUser_FiltersUnreadOnly()
    {
        // Arrange
        var userId = Guid.NewGuid();
        var unreadItem = CreateInbox(userId, "Unread");
        var readItem = CreateInbox(userId, "Read");
        await _repository.CreateAsync(unreadItem);
        await _repository.CreateAsync(readItem);
        await _repository.MarkReadAsync(_tenantId, readItem.Id);

        // Act
        var unreadItems = await _repository.GetForUserAsync(_tenantId, userId, unreadOnly: true);

        // Assert
        unreadItems.Should().HaveCount(1);
        unreadItems[0].Title.Should().Be("Unread");
    }

    [Fact]
    public async Task GetUnreadCount_ReturnsCorrectCount()
    {
        // Arrange
        var userId = Guid.NewGuid();
        await _repository.CreateAsync(CreateInbox(userId, "Unread 1"));
        await _repository.CreateAsync(CreateInbox(userId, "Unread 2"));
        var readItem = CreateInbox(userId, "Read");
        await _repository.CreateAsync(readItem);
        await _repository.MarkReadAsync(_tenantId, readItem.Id);

        // Act
        var count = await _repository.GetUnreadCountAsync(_tenantId, userId);

        // Assert
        count.Should().Be(2);
    }

    [Fact]
    public async Task MarkRead_UpdatesReadStatus()
    {
        // Arrange
        var userId = Guid.NewGuid();
        var inbox = CreateInbox(userId, "To be read");
        await _repository.CreateAsync(inbox);

        // Act
        var result = await _repository.MarkReadAsync(_tenantId, inbox.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, inbox.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Read.Should().BeTrue();
        fetched.ReadAt.Should().NotBeNull();
    }

    [Fact]
    public async Task MarkAllRead_MarksAllUserItemsAsRead()
    {
        // Arrange
        var userId = Guid.NewGuid();
        await _repository.CreateAsync(CreateInbox(userId, "Item 1"));
        await _repository.CreateAsync(CreateInbox(userId, "Item 2"));
        await _repository.CreateAsync(CreateInbox(userId, "Item 3"));

        // Act
        var count = await _repository.MarkAllReadAsync(_tenantId, userId);
        var unreadCount = await _repository.GetUnreadCountAsync(_tenantId, userId);

        // Assert
        count.Should().Be(3);
        unreadCount.Should().Be(0);
    }

    [Fact]
    public async Task Archive_ArchivesItem()
    {
        // Arrange
        var userId = Guid.NewGuid();
        var inbox = CreateInbox(userId, "To be archived");
        await _repository.CreateAsync(inbox);

        // Act
        var result = await _repository.ArchiveAsync(_tenantId, inbox.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, inbox.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Archived.Should().BeTrue();
        fetched.ArchivedAt.Should().NotBeNull();
    }

    [Fact]
    public async Task Delete_RemovesItem()
    {
        // Arrange
        var userId = Guid.NewGuid();
        var inbox = CreateInbox(userId, "To be deleted");
        await _repository.CreateAsync(inbox);

        // Act
        var result = await _repository.DeleteAsync(_tenantId, inbox.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, inbox.Id);

        // Assert
        result.Should().BeTrue();
        fetched.Should().BeNull();
    }

    [Fact]
    public async Task DeleteOld_RemovesOldItems()
    {
        // Arrange - We can't easily set CreatedAt in the test, so this just verifies the API works
        var userId = Guid.NewGuid();
        await _repository.CreateAsync(CreateInbox(userId, "Recent item"));

        // Act - Delete items older than future date (should delete the item)
        var cutoff = DateTimeOffset.UtcNow.AddMinutes(1);
        var count = await _repository.DeleteOldAsync(cutoff);

        // Assert
        count.Should().Be(1);
    }

    private InboxEntity CreateInbox(Guid userId, string title) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        UserId = userId,
        Title = title,
        EventType = "test.event"
    };
}
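GetUnreadCountAsync and MarkAllReadAsync are asserted only through counts, so a plausible implementation is a filtered COUNT plus a bulk UPDATE scoped to tenant and user. A sketch under those assumptions (schema and column names are guesses, not confirmed by this diff):

// Hypothetical query shapes behind the unread-count and mark-all-read tests.
const string unreadCountSql = """
    SELECT count(*)
    FROM notify.inbox
    WHERE tenant_id = @tenant_id AND user_id = @user_id AND NOT read AND NOT archived
    """;

const string markAllReadSql = """
    UPDATE notify.inbox
    SET read = TRUE, read_at = now()
    WHERE tenant_id = @tenant_id AND user_id = @user_id AND NOT read
    """;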
@@ -0,0 +1,168 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Notify.Storage.Postgres.Models;
using StellaOps.Notify.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Notify.Storage.Postgres.Tests;

[Collection(NotifyPostgresCollection.Name)]
public sealed class NotifyAuditRepositoryTests : IAsyncLifetime
{
    private readonly NotifyPostgresFixture _fixture;
    private readonly NotifyAuditRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public NotifyAuditRepositoryTests(NotifyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
        _repository = new NotifyAuditRepository(dataSource, NullLogger<NotifyAuditRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task Create_ReturnsGeneratedId()
    {
        // Arrange
        var audit = new NotifyAuditEntity
        {
            TenantId = _tenantId,
            UserId = Guid.NewGuid(),
            Action = "channel.created",
            ResourceType = "channel",
            ResourceId = Guid.NewGuid().ToString()
        };

        // Act
        var id = await _repository.CreateAsync(audit);

        // Assert
        id.Should().BeGreaterThan(0);
    }

    [Fact]
    public async Task List_ReturnsAuditEntriesOrderedByCreatedAtDesc()
    {
        // Arrange
        var audit1 = CreateAudit("action1");
        var audit2 = CreateAudit("action2");
        await _repository.CreateAsync(audit1);
        await Task.Delay(10);
        await _repository.CreateAsync(audit2);

        // Act
        var audits = await _repository.ListAsync(_tenantId, limit: 10);

        // Assert
        audits.Should().HaveCount(2);
        audits[0].Action.Should().Be("action2"); // Most recent first
    }

    [Fact]
    public async Task GetByResource_ReturnsResourceAudits()
    {
        // Arrange
        var resourceId = Guid.NewGuid().ToString();
        var audit = new NotifyAuditEntity
        {
            TenantId = _tenantId,
            Action = "rule.updated",
            ResourceType = "rule",
            ResourceId = resourceId
        };
        await _repository.CreateAsync(audit);

        // Act
        var audits = await _repository.GetByResourceAsync(_tenantId, "rule", resourceId);

        // Assert
        audits.Should().HaveCount(1);
        audits[0].ResourceId.Should().Be(resourceId);
    }

    [Fact]
    public async Task GetByResource_WithoutResourceId_ReturnsAllOfType()
    {
        // Arrange
        await _repository.CreateAsync(new NotifyAuditEntity
        {
            TenantId = _tenantId,
            Action = "template.created",
            ResourceType = "template",
            ResourceId = Guid.NewGuid().ToString()
        });
        await _repository.CreateAsync(new NotifyAuditEntity
        {
            TenantId = _tenantId,
            Action = "template.updated",
            ResourceType = "template",
            ResourceId = Guid.NewGuid().ToString()
        });

        // Act
        var audits = await _repository.GetByResourceAsync(_tenantId, "template");

        // Assert
        audits.Should().HaveCount(2);
    }

    [Fact]
    public async Task GetByCorrelationId_ReturnsCorrelatedAudits()
    {
        // Arrange
        var correlationId = Guid.NewGuid().ToString();
        var audit1 = new NotifyAuditEntity
        {
            TenantId = _tenantId,
            Action = "step1",
            ResourceType = "delivery",
            CorrelationId = correlationId
        };
        var audit2 = new NotifyAuditEntity
        {
            TenantId = _tenantId,
            Action = "step2",
            ResourceType = "delivery",
            CorrelationId = correlationId
        };
        await _repository.CreateAsync(audit1);
        await _repository.CreateAsync(audit2);

        // Act
        var audits = await _repository.GetByCorrelationIdAsync(_tenantId, correlationId);

        // Assert
        audits.Should().HaveCount(2);
        audits.Should().AllSatisfy(a => a.CorrelationId.Should().Be(correlationId));
    }

    [Fact]
    public async Task DeleteOld_RemovesOldAudits()
    {
        // Arrange
        await _repository.CreateAsync(CreateAudit("old-action"));

        // Act - Delete audits older than future date
        var cutoff = DateTimeOffset.UtcNow.AddMinutes(1);
        var count = await _repository.DeleteOldAsync(cutoff);

        // Assert
        count.Should().Be(1);
    }

    private NotifyAuditEntity CreateAudit(string action) => new()
    {
        TenantId = _tenantId,
        UserId = Guid.NewGuid(),
        Action = action,
        ResourceType = "test",
        ResourceId = Guid.NewGuid().ToString()
    };
}
@@ -0,0 +1,197 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Notify.Storage.Postgres.Models;
using StellaOps.Notify.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Notify.Storage.Postgres.Tests;

[Collection(NotifyPostgresCollection.Name)]
public sealed class RuleRepositoryTests : IAsyncLifetime
{
    private readonly NotifyPostgresFixture _fixture;
    private readonly RuleRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public RuleRepositoryTests(NotifyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
        _repository = new RuleRepository(dataSource, NullLogger<RuleRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetById_RoundTripsRule()
    {
        // Arrange
        var rule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "critical-alerts",
            Description = "Send critical alerts to ops team",
            Enabled = true,
            Priority = 100,
            EventTypes = ["scan.completed", "vulnerability.found"],
            ChannelIds = [Guid.NewGuid()]
        };

        // Act
        await _repository.CreateAsync(rule);
        var fetched = await _repository.GetByIdAsync(_tenantId, rule.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(rule.Id);
        fetched.Name.Should().Be("critical-alerts");
        fetched.Priority.Should().Be(100);
        fetched.EventTypes.Should().Contain(["scan.completed", "vulnerability.found"]);
    }

    [Fact]
    public async Task GetByName_ReturnsCorrectRule()
    {
        // Arrange
        var rule = CreateRule("info-digest");
        await _repository.CreateAsync(rule);

        // Act
        var fetched = await _repository.GetByNameAsync(_tenantId, "info-digest");

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(rule.Id);
    }

    [Fact]
    public async Task List_ReturnsAllRulesForTenant()
    {
        // Arrange
        var rule1 = CreateRule("rule1");
        var rule2 = CreateRule("rule2");
        await _repository.CreateAsync(rule1);
        await _repository.CreateAsync(rule2);

        // Act
        var rules = await _repository.ListAsync(_tenantId);

        // Assert
        rules.Should().HaveCount(2);
        rules.Select(r => r.Name).Should().Contain(["rule1", "rule2"]);
    }

    [Fact]
    public async Task List_FiltersByEnabled()
    {
        // Arrange
        var enabledRule = CreateRule("enabled");
        var disabledRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "disabled",
            Enabled = false,
            EventTypes = ["test"]
        };
        await _repository.CreateAsync(enabledRule);
        await _repository.CreateAsync(disabledRule);

        // Act
        var enabledRules = await _repository.ListAsync(_tenantId, enabled: true);

        // Assert
        enabledRules.Should().HaveCount(1);
        enabledRules[0].Name.Should().Be("enabled");
    }

    [Fact]
    public async Task GetMatchingRules_ReturnsRulesForEventType()
    {
        // Arrange
        var scanRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "scan-rule",
            Enabled = true,
            EventTypes = ["scan.completed"]
        };
        var vulnRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "vuln-rule",
            Enabled = true,
            EventTypes = ["vulnerability.found"]
        };
        await _repository.CreateAsync(scanRule);
        await _repository.CreateAsync(vulnRule);

        // Act
        var matchingRules = await _repository.GetMatchingRulesAsync(_tenantId, "scan.completed");

        // Assert
        matchingRules.Should().HaveCount(1);
        matchingRules[0].Name.Should().Be("scan-rule");
    }

    [Fact]
    public async Task Update_ModifiesRule()
    {
        // Arrange
        var rule = CreateRule("update-test");
        await _repository.CreateAsync(rule);

        // Act
        var updated = new RuleEntity
        {
            Id = rule.Id,
            TenantId = _tenantId,
            Name = "update-test",
            Description = "Updated description",
            Priority = 200,
            Enabled = false,
            EventTypes = ["new.event"]
        };
        var result = await _repository.UpdateAsync(updated);
        var fetched = await _repository.GetByIdAsync(_tenantId, rule.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Description.Should().Be("Updated description");
        fetched.Priority.Should().Be(200);
        fetched.Enabled.Should().BeFalse();
    }

    [Fact]
    public async Task Delete_RemovesRule()
    {
        // Arrange
        var rule = CreateRule("delete-test");
        await _repository.CreateAsync(rule);

        // Act
        var result = await _repository.DeleteAsync(_tenantId, rule.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, rule.Id);

        // Assert
        result.Should().BeTrue();
        fetched.Should().BeNull();
    }

    private RuleEntity CreateRule(string name) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        Name = name,
        Enabled = true,
        EventTypes = ["test.event"]
    };
}
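GetMatchingRulesAsync filters by a single event type against each rule's EventTypes list, which in PostgreSQL is most naturally an array-membership test. A hedged sketch of the query shape (assuming event_types is stored as a text[] column; that mapping is not confirmed by this diff):

// Hypothetical query shape for GetMatchingRulesAsync using array containment.
const string matchingRulesSql = """
    SELECT id, tenant_id, name, enabled, priority, event_types, channel_ids
    FROM notify.rules
    WHERE tenant_id = @tenant_id AND enabled AND @event_type = ANY(event_types)
    ORDER BY priority DESC
    """;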
@@ -0,0 +1,189 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Notify.Storage.Postgres.Models;
using StellaOps.Notify.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Notify.Storage.Postgres.Tests;

[Collection(NotifyPostgresCollection.Name)]
public sealed class TemplateRepositoryTests : IAsyncLifetime
{
    private readonly NotifyPostgresFixture _fixture;
    private readonly TemplateRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public TemplateRepositoryTests(NotifyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
        _repository = new TemplateRepository(dataSource, NullLogger<TemplateRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetById_RoundTripsTemplate()
    {
        // Arrange
        var template = new TemplateEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "scan-completed",
            ChannelType = ChannelType.Email,
            SubjectTemplate = "Scan Completed: {{imageName}}",
            BodyTemplate = "<p>Scan for {{imageName}} completed with {{vulnCount}} vulnerabilities.</p>",
            Locale = "en"
        };

        // Act
        await _repository.CreateAsync(template);
        var fetched = await _repository.GetByIdAsync(_tenantId, template.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(template.Id);
        fetched.Name.Should().Be("scan-completed");
        fetched.ChannelType.Should().Be(ChannelType.Email);
        fetched.SubjectTemplate.Should().Contain("{{imageName}}");
    }

    [Fact]
    public async Task GetByName_ReturnsCorrectTemplate()
    {
        // Arrange
        var template = CreateTemplate("alert-template", ChannelType.Slack);
        await _repository.CreateAsync(template);

        // Act
        var fetched = await _repository.GetByNameAsync(_tenantId, "alert-template", ChannelType.Slack);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(template.Id);
    }

    [Fact]
    public async Task GetByName_FiltersCorrectlyByLocale()
    {
        // Arrange
        var enTemplate = new TemplateEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "localized-template",
            ChannelType = ChannelType.Email,
            BodyTemplate = "English content",
            Locale = "en"
        };
        var frTemplate = new TemplateEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "localized-template",
            ChannelType = ChannelType.Email,
            BodyTemplate = "Contenu français",
            Locale = "fr"
        };
        await _repository.CreateAsync(enTemplate);
        await _repository.CreateAsync(frTemplate);

        // Act
        var frFetched = await _repository.GetByNameAsync(_tenantId, "localized-template", ChannelType.Email, "fr");

        // Assert
        frFetched.Should().NotBeNull();
        frFetched!.BodyTemplate.Should().Contain("français");
    }

    [Fact]
    public async Task List_ReturnsAllTemplatesForTenant()
    {
        // Arrange
        var template1 = CreateTemplate("template1", ChannelType.Email);
        var template2 = CreateTemplate("template2", ChannelType.Slack);
        await _repository.CreateAsync(template1);
        await _repository.CreateAsync(template2);

        // Act
        var templates = await _repository.ListAsync(_tenantId);

        // Assert
        templates.Should().HaveCount(2);
        templates.Select(t => t.Name).Should().Contain(["template1", "template2"]);
    }

    [Fact]
    public async Task List_FiltersByChannelType()
    {
        // Arrange
        var emailTemplate = CreateTemplate("email", ChannelType.Email);
        var slackTemplate = CreateTemplate("slack", ChannelType.Slack);
        await _repository.CreateAsync(emailTemplate);
        await _repository.CreateAsync(slackTemplate);

        // Act
        var emailTemplates = await _repository.ListAsync(_tenantId, channelType: ChannelType.Email);

        // Assert
        emailTemplates.Should().HaveCount(1);
        emailTemplates[0].Name.Should().Be("email");
    }

    [Fact]
    public async Task Update_ModifiesTemplate()
    {
        // Arrange
        var template = CreateTemplate("update-test", ChannelType.Email);
        await _repository.CreateAsync(template);

        // Act
        var updated = new TemplateEntity
        {
            Id = template.Id,
            TenantId = _tenantId,
            Name = "update-test",
            ChannelType = ChannelType.Email,
            SubjectTemplate = "Updated Subject",
            BodyTemplate = "Updated body content"
        };
        var result = await _repository.UpdateAsync(updated);
        var fetched = await _repository.GetByIdAsync(_tenantId, template.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.SubjectTemplate.Should().Be("Updated Subject");
        fetched.BodyTemplate.Should().Be("Updated body content");
    }

    [Fact]
    public async Task Delete_RemovesTemplate()
    {
        // Arrange
        var template = CreateTemplate("delete-test", ChannelType.Email);
        await _repository.CreateAsync(template);

        // Act
        var result = await _repository.DeleteAsync(_tenantId, template.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, template.Id);

        // Assert
        result.Should().BeTrue();
        fetched.Should().BeNull();
    }

    private TemplateEntity CreateTemplate(string name, ChannelType type) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        Name = name,
        ChannelType = type,
        BodyTemplate = "Default template body"
    };
}
@@ -0,0 +1,567 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Orchestrator.Core.Domain.Events;

/// <summary>
/// Standardized event envelope for orchestrator events.
/// Supports policy, export, and job lifecycle events with idempotency keys.
/// </summary>
public sealed record EventEnvelope(
    /// <summary>Schema version identifier.</summary>
    string SchemaVersion,

    /// <summary>Unique event ID (UUIDv7 or ULID format).</summary>
    string EventId,

    /// <summary>Event type classification.</summary>
    OrchestratorEventType EventType,

    /// <summary>When the event occurred (UTC).</summary>
    DateTimeOffset OccurredAt,

    /// <summary>Idempotency key for deduplication.</summary>
    string IdempotencyKey,

    /// <summary>Correlation ID for request tracing.</summary>
    string? CorrelationId,

    /// <summary>Tenant identifier.</summary>
    string TenantId,

    /// <summary>Project identifier (optional but preferred).</summary>
    string? ProjectId,

    /// <summary>Actor who triggered/emitted the event.</summary>
    EventActor Actor,

    /// <summary>Job-related metadata (null for non-job events).</summary>
    EventJob? Job,

    /// <summary>Event metrics.</summary>
    EventMetrics? Metrics,

    /// <summary>Notifier transport metadata.</summary>
    EventNotifier? Notifier,

    /// <summary>Event-specific payload.</summary>
    JsonElement? Payload)
{
    /// <summary>Current schema version.</summary>
    public const string CurrentSchemaVersion = "orch.event.v1";

    /// <summary>Creates a new event envelope with generated ID and timestamp.</summary>
    public static EventEnvelope Create(
        OrchestratorEventType eventType,
        string tenantId,
        EventActor actor,
        string? correlationId = null,
        string? projectId = null,
        EventJob? job = null,
        EventMetrics? metrics = null,
        EventNotifier? notifier = null,
        JsonElement? payload = null)
    {
        var eventId = GenerateEventId();
        var idempotencyKey = GenerateIdempotencyKey(eventType, job?.Id, job?.Attempt ?? 0);

        return new EventEnvelope(
            SchemaVersion: CurrentSchemaVersion,
            EventId: eventId,
            EventType: eventType,
            OccurredAt: DateTimeOffset.UtcNow,
            IdempotencyKey: idempotencyKey,
            CorrelationId: correlationId,
            TenantId: tenantId,
            ProjectId: projectId,
            Actor: actor,
            Job: job,
            Metrics: metrics,
            Notifier: notifier,
            Payload: payload);
    }

    /// <summary>Creates a job-related event envelope.</summary>
    public static EventEnvelope ForJob(
        OrchestratorEventType eventType,
        string tenantId,
        EventActor actor,
        EventJob job,
        string? correlationId = null,
        string? projectId = null,
        EventMetrics? metrics = null,
        JsonElement? payload = null)
    {
        return Create(
            eventType: eventType,
            tenantId: tenantId,
            actor: actor,
            correlationId: correlationId,
            projectId: projectId,
            job: job,
            metrics: metrics,
            payload: payload);
    }

    /// <summary>Creates an export-related event envelope.</summary>
    public static EventEnvelope ForExport(
        OrchestratorEventType eventType,
        string tenantId,
        EventActor actor,
        EventJob exportJob,
        string? correlationId = null,
        string? projectId = null,
        EventMetrics? metrics = null,
        JsonElement? payload = null)
    {
        return ForJob(
            eventType: eventType,
            tenantId: tenantId,
            actor: actor,
            job: exportJob,
            correlationId: correlationId,
            projectId: projectId,
            metrics: metrics,
            payload: payload);
    }

    /// <summary>Creates a policy-related event envelope.</summary>
    public static EventEnvelope ForPolicy(
        OrchestratorEventType eventType,
        string tenantId,
        EventActor actor,
        string? correlationId = null,
        string? projectId = null,
        JsonElement? payload = null)
    {
        return Create(
            eventType: eventType,
            tenantId: tenantId,
            actor: actor,
            correlationId: correlationId,
            projectId: projectId,
            payload: payload);
    }

    /// <summary>Generates a UUIDv7-style event ID.</summary>
    private static string GenerateEventId()
    {
        // UUIDv7-style: millisecond timestamp plus a random suffix, rendered as a URN.
        var timestamp = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds();
        var random = Guid.NewGuid().ToString("N")[..16];
        return $"urn:orch:event:{timestamp:x}-{random}";
    }

    /// <summary>Generates an idempotency key for deduplication.</summary>
    public static string GenerateIdempotencyKey(OrchestratorEventType eventType, string? jobId, int attempt)
    {
        var jobPart = jobId ?? "none";
        return $"orch-{eventType.ToEventTypeName()}-{jobPart}-{attempt}";
    }

    /// <summary>Serializes the envelope to JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Deserializes an envelope from JSON.</summary>
    public static EventEnvelope? FromJson(string json)
    {
        try
        {
            return JsonSerializer.Deserialize<EventEnvelope>(json, JsonOptions);
        }
        catch (JsonException)
        {
            return null;
        }
    }

    /// <summary>Computes a digest of the envelope for signing.</summary>
    public string ComputeDigest()
    {
        var json = ToJson();
        var bytes = Encoding.UTF8.GetBytes(json);
        var hash = SHA256.HashData(bytes);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }
    };
}
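As a usage sketch, building and serializing a job-completion envelope with the factories above could look like the following; under the key format defined in GenerateIdempotencyKey, this call would produce "orch-job.completed-job-123-2". Only members defined in this file are used; the literal tenant, job, and correlation values are illustrative.

var envelope = EventEnvelope.ForJob(
    eventType: OrchestratorEventType.JobCompleted,
    tenantId: "tenant-a",
    actor: EventActor.Service("orchestrator"),
    job: EventJob.Completed("job-123", "ingest", attempt: 2),
    correlationId: "corr-42",
    metrics: EventMetrics.WithDuration(12.5));

var json = envelope.ToJson();          // camelCase properties, snake_case enums, nulls omitted
var digest = envelope.ComputeDigest(); // "sha256:..." over the serialized JSON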
/// <summary>
/// Actor who triggered or emitted an event.
/// </summary>
public sealed record EventActor(
    /// <summary>Subject identifier (e.g., "service/worker-sdk-go", "user/admin@example.com").</summary>
    string Subject,

    /// <summary>Scopes/permissions under which the action was taken.</summary>
    IReadOnlyList<string>? Scopes)
{
    /// <summary>Creates a service actor.</summary>
    public static EventActor Service(string serviceName, params string[] scopes)
        => new($"service/{serviceName}", scopes.Length > 0 ? scopes : null);

    /// <summary>Creates a user actor.</summary>
    public static EventActor User(string userId, params string[] scopes)
        => new($"user/{userId}", scopes.Length > 0 ? scopes : null);

    /// <summary>Creates a system actor (for automated processes).</summary>
    public static EventActor System(string component, params string[] scopes)
        => new($"system/{component}", scopes.Length > 0 ? scopes : null);

    /// <summary>Creates a worker actor.</summary>
    public static EventActor Worker(string workerId, string sdkType)
        => new($"worker/{sdkType}/{workerId}", null);
}

/// <summary>
/// Job-related metadata in an event.
/// </summary>
public sealed record EventJob(
    /// <summary>Job identifier.</summary>
    string Id,

    /// <summary>Job type (e.g., "pack-run", "ingest", "export").</summary>
    string Type,

    /// <summary>Run identifier (for pack runs / simulations).</summary>
    string? RunId,

    /// <summary>Attempt number.</summary>
    int Attempt,

    /// <summary>Lease identifier.</summary>
    string? LeaseId,

    /// <summary>Task runner identifier.</summary>
    string? TaskRunnerId,

    /// <summary>Job status.</summary>
    string Status,

    /// <summary>Status reason (for failures/cancellations).</summary>
    string? Reason,

    /// <summary>Payload digest for integrity.</summary>
    string? PayloadDigest,

    /// <summary>Associated artifacts.</summary>
    IReadOnlyList<EventArtifact>? Artifacts)
{
    /// <summary>Creates job metadata from basic info.</summary>
    public static EventJob Create(
        string id,
        string type,
        string status,
        int attempt = 1,
        string? runId = null,
        string? leaseId = null,
        string? taskRunnerId = null,
        string? reason = null,
        string? payloadDigest = null,
        IReadOnlyList<EventArtifact>? artifacts = null)
    {
        return new EventJob(
            Id: id,
            Type: type,
            RunId: runId,
            Attempt: attempt,
            LeaseId: leaseId,
            TaskRunnerId: taskRunnerId,
            Status: status,
            Reason: reason,
            PayloadDigest: payloadDigest,
            Artifacts: artifacts);
    }

    /// <summary>Creates a completed job event.</summary>
    public static EventJob Completed(string id, string type, int attempt, string? payloadDigest = null, IReadOnlyList<EventArtifact>? artifacts = null)
        => Create(id, type, "completed", attempt, payloadDigest: payloadDigest, artifacts: artifacts);

    /// <summary>Creates a failed job event.</summary>
    public static EventJob Failed(string id, string type, int attempt, string reason)
        => Create(id, type, "failed", attempt, reason: reason);

    /// <summary>Creates a canceled job event.</summary>
    public static EventJob Canceled(string id, string type, int attempt, string reason)
        => Create(id, type, "canceled", attempt, reason: reason);
}

/// <summary>
/// Artifact metadata in an event.
/// </summary>
public sealed record EventArtifact(
    /// <summary>Artifact URI (storage location).</summary>
    string Uri,

    /// <summary>Content digest for integrity.</summary>
    string Digest,

    /// <summary>MIME type.</summary>
    string Mime);

/// <summary>
/// Event timing and performance metrics.
/// </summary>
public sealed record EventMetrics(
    /// <summary>Duration in seconds.</summary>
    double? DurationSeconds,

    /// <summary>Log stream lag in seconds.</summary>
    double? LogStreamLagSeconds,

    /// <summary>Backoff delay in seconds.</summary>
    double? BackoffSeconds,

    /// <summary>Queue wait time in seconds.</summary>
    double? QueueWaitSeconds,

    /// <summary>Processing time in seconds.</summary>
    double? ProcessingSeconds)
{
    /// <summary>Creates metrics with just duration.</summary>
    public static EventMetrics WithDuration(double seconds)
        => new(seconds, null, null, null, null);

    /// <summary>Creates metrics with duration and processing breakdown.</summary>
    public static EventMetrics WithBreakdown(double total, double queueWait, double processing)
        => new(total, null, null, queueWait, processing);
}

/// <summary>
/// Notifier transport metadata.
/// </summary>
public sealed record EventNotifier(
    /// <summary>Notifier channel name.</summary>
    string Channel,

    /// <summary>Delivery format (e.g., "dsse", "raw").</summary>
    string Delivery,

    /// <summary>Replay metadata (for replayed events).</summary>
    EventReplay? Replay)
{
    /// <summary>Creates notifier metadata for the jobs channel.</summary>
    public static EventNotifier JobsChannel(string delivery = "dsse")
        => new("orch.jobs", delivery, null);

    /// <summary>Creates notifier metadata for the exports channel.</summary>
    public static EventNotifier ExportsChannel(string delivery = "dsse")
        => new("orch.exports", delivery, null);

    /// <summary>Creates notifier metadata for the policy channel.</summary>
    public static EventNotifier PolicyChannel(string delivery = "dsse")
        => new("orch.policy", delivery, null);

    /// <summary>Adds replay metadata.</summary>
    public EventNotifier WithReplay(int ordinal, int total)
        => this with { Replay = new EventReplay(ordinal, total) };
}

/// <summary>
/// Replay metadata for replayed events.
/// </summary>
public sealed record EventReplay(
    /// <summary>Ordinal position in replay sequence.</summary>
    int Ordinal,

    /// <summary>Total events in replay sequence.</summary>
    int Total);

/// <summary>
/// Orchestrator event types.
/// </summary>
public enum OrchestratorEventType
{
    // Job lifecycle
    JobCreated,
    JobScheduled,
    JobStarted,
    JobCompleted,
    JobFailed,
    JobCanceled,
    JobRetrying,

    // Export lifecycle
    ExportCreated,
    ExportStarted,
    ExportCompleted,
    ExportFailed,
    ExportCanceled,
    ExportArchived,
    ExportExpired,
    ExportDeleted,

    // Schedule lifecycle
    ScheduleCreated,
    ScheduleEnabled,
    ScheduleDisabled,
    ScheduleTriggered,
    ScheduleSkipped,

    // Alert lifecycle
    AlertCreated,
    AlertAcknowledged,
    AlertResolved,

    // Retention lifecycle
    RetentionPruneStarted,
    RetentionPruneCompleted,

    // Policy lifecycle
    PolicyUpdated,
    PolicySimulated,
    PolicyApplied,

    // Pack run lifecycle
    PackRunCreated,
    PackRunStarted,
    PackRunLog,
    PackRunArtifact,
    PackRunCompleted,
    PackRunFailed
}

/// <summary>
/// Extension methods for event types.
/// </summary>
public static class OrchestratorEventTypeExtensions
{
    /// <summary>Converts event type to canonical string name.</summary>
    public static string ToEventTypeName(this OrchestratorEventType eventType)
    {
        return eventType switch
        {
            OrchestratorEventType.JobCreated => "job.created",
            OrchestratorEventType.JobScheduled => "job.scheduled",
            OrchestratorEventType.JobStarted => "job.started",
            OrchestratorEventType.JobCompleted => "job.completed",
            OrchestratorEventType.JobFailed => "job.failed",
            OrchestratorEventType.JobCanceled => "job.canceled",
            OrchestratorEventType.JobRetrying => "job.retrying",

            OrchestratorEventType.ExportCreated => "export.created",
            OrchestratorEventType.ExportStarted => "export.started",
            OrchestratorEventType.ExportCompleted => "export.completed",
            OrchestratorEventType.ExportFailed => "export.failed",
            OrchestratorEventType.ExportCanceled => "export.canceled",
            OrchestratorEventType.ExportArchived => "export.archived",
            OrchestratorEventType.ExportExpired => "export.expired",
            OrchestratorEventType.ExportDeleted => "export.deleted",

            OrchestratorEventType.ScheduleCreated => "schedule.created",
            OrchestratorEventType.ScheduleEnabled => "schedule.enabled",
            OrchestratorEventType.ScheduleDisabled => "schedule.disabled",
            OrchestratorEventType.ScheduleTriggered => "schedule.triggered",
            OrchestratorEventType.ScheduleSkipped => "schedule.skipped",

            OrchestratorEventType.AlertCreated => "alert.created",
            OrchestratorEventType.AlertAcknowledged => "alert.acknowledged",
            OrchestratorEventType.AlertResolved => "alert.resolved",

            OrchestratorEventType.RetentionPruneStarted => "retention.prune_started",
            OrchestratorEventType.RetentionPruneCompleted => "retention.prune_completed",

            OrchestratorEventType.PolicyUpdated => "policy.updated",
            OrchestratorEventType.PolicySimulated => "policy.simulated",
            OrchestratorEventType.PolicyApplied => "policy.applied",

            OrchestratorEventType.PackRunCreated => "pack_run.created",
            OrchestratorEventType.PackRunStarted => "pack_run.started",
            OrchestratorEventType.PackRunLog => "pack_run.log",
            OrchestratorEventType.PackRunArtifact => "pack_run.artifact",
            OrchestratorEventType.PackRunCompleted => "pack_run.completed",
            OrchestratorEventType.PackRunFailed => "pack_run.failed",

            _ => eventType.ToString().ToLowerInvariant()
        };
    }

    /// <summary>Parses a canonical event type name.</summary>
    public static OrchestratorEventType? FromEventTypeName(string name)
    {
        return name switch
        {
            "job.created" => OrchestratorEventType.JobCreated,
            "job.scheduled" => OrchestratorEventType.JobScheduled,
            "job.started" => OrchestratorEventType.JobStarted,
            "job.completed" => OrchestratorEventType.JobCompleted,
            "job.failed" => OrchestratorEventType.JobFailed,
            "job.canceled" => OrchestratorEventType.JobCanceled,
            "job.retrying" => OrchestratorEventType.JobRetrying,

            "export.created" => OrchestratorEventType.ExportCreated,
            "export.started" => OrchestratorEventType.ExportStarted,
            "export.completed" => OrchestratorEventType.ExportCompleted,
            "export.failed" => OrchestratorEventType.ExportFailed,
            "export.canceled" => OrchestratorEventType.ExportCanceled,
            "export.archived" => OrchestratorEventType.ExportArchived,
            "export.expired" => OrchestratorEventType.ExportExpired,
            "export.deleted" => OrchestratorEventType.ExportDeleted,

            "schedule.created" => OrchestratorEventType.ScheduleCreated,
            "schedule.enabled" => OrchestratorEventType.ScheduleEnabled,
            "schedule.disabled" => OrchestratorEventType.ScheduleDisabled,
            "schedule.triggered" => OrchestratorEventType.ScheduleTriggered,
            "schedule.skipped" => OrchestratorEventType.ScheduleSkipped,

            "alert.created" => OrchestratorEventType.AlertCreated,
            "alert.acknowledged" => OrchestratorEventType.AlertAcknowledged,
            "alert.resolved" => OrchestratorEventType.AlertResolved,

            "retention.prune_started" => OrchestratorEventType.RetentionPruneStarted,
            "retention.prune_completed" => OrchestratorEventType.RetentionPruneCompleted,

            "policy.updated" => OrchestratorEventType.PolicyUpdated,
            "policy.simulated" => OrchestratorEventType.PolicySimulated,
            "policy.applied" => OrchestratorEventType.PolicyApplied,

            "pack_run.created" => OrchestratorEventType.PackRunCreated,
            "pack_run.started" => OrchestratorEventType.PackRunStarted,
            "pack_run.log" => OrchestratorEventType.PackRunLog,
            "pack_run.artifact" => OrchestratorEventType.PackRunArtifact,
            "pack_run.completed" => OrchestratorEventType.PackRunCompleted,
            "pack_run.failed" => OrchestratorEventType.PackRunFailed,

            _ => null
        };
    }

    /// <summary>Whether the event type is a failure event.</summary>
    public static bool IsFailure(this OrchestratorEventType eventType)
    {
        return eventType is
            OrchestratorEventType.JobFailed or
            OrchestratorEventType.ExportFailed or
            OrchestratorEventType.PackRunFailed;
    }

    /// <summary>Whether the event type is a completion event.</summary>
    public static bool IsCompletion(this OrchestratorEventType eventType)
    {
        return eventType is
            OrchestratorEventType.JobCompleted or
            OrchestratorEventType.ExportCompleted or
            OrchestratorEventType.PackRunCompleted or
            OrchestratorEventType.RetentionPruneCompleted;
    }

    /// <summary>Whether the event type is a lifecycle terminal event.</summary>
    public static bool IsTerminal(this OrchestratorEventType eventType)
    {
        return eventType.IsFailure() || eventType.IsCompletion() ||
            eventType is
                OrchestratorEventType.JobCanceled or
                OrchestratorEventType.ExportCanceled or
                OrchestratorEventType.ExportDeleted or
                OrchestratorEventType.AlertResolved;
    }
}
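The two switch expressions above are meant to be inverses for every defined member, and the default arms make silent drift easy to miss. A sketch of a round-trip test in the same xUnit/FluentAssertions style as the other files in this commit (placement in a test project is assumed):

[Fact]
public void EventTypeNames_RoundTripThroughParse()
{
    foreach (var eventType in Enum.GetValues<OrchestratorEventType>())
    {
        // Every canonical name should parse back to the member that produced it.
        var name = eventType.ToEventTypeName();
        OrchestratorEventTypeExtensions.FromEventTypeName(name).Should().Be(eventType);
    }
}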
@@ -0,0 +1,241 @@
namespace StellaOps.Orchestrator.Core.Domain.Events;

/// <summary>
/// Interface for publishing orchestrator events to the notifier bus.
/// </summary>
public interface IEventPublisher
{
    /// <summary>Publishes an event to the notifier bus.</summary>
    /// <param name="envelope">The event envelope to publish.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if published successfully; false if deduplicated.</returns>
    Task<bool> PublishAsync(EventEnvelope envelope, CancellationToken cancellationToken = default);

    /// <summary>Publishes multiple events to the notifier bus.</summary>
    /// <param name="envelopes">The event envelopes to publish.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The result containing success/dedup counts.</returns>
    Task<BatchPublishResult> PublishBatchAsync(IEnumerable<EventEnvelope> envelopes, CancellationToken cancellationToken = default);

    /// <summary>Checks if an event with the given idempotency key has already been published.</summary>
    /// <param name="idempotencyKey">The idempotency key to check.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if already published.</returns>
    Task<bool> IsPublishedAsync(string idempotencyKey, CancellationToken cancellationToken = default);
}

/// <summary>
/// Result of a batch publish operation.
/// </summary>
public sealed record BatchPublishResult(
    /// <summary>Number of events successfully published.</summary>
    int Published,

    /// <summary>Number of events deduplicated (already published).</summary>
    int Deduplicated,

    /// <summary>Number of events that failed to publish.</summary>
    int Failed,

    /// <summary>Errors encountered during publishing.</summary>
    IReadOnlyList<string> Errors)
{
    /// <summary>Total events processed.</summary>
    public int Total => Published + Deduplicated + Failed;

    /// <summary>Whether any events were published successfully.</summary>
    public bool HasPublished => Published > 0;

    /// <summary>Whether any errors occurred.</summary>
    public bool HasErrors => Failed > 0 || Errors.Count > 0;

    /// <summary>Creates an empty result.</summary>
    public static BatchPublishResult Empty => new(0, 0, 0, []);

    /// <summary>Creates a successful single publish result.</summary>
    public static BatchPublishResult SingleSuccess => new(1, 0, 0, []);

    /// <summary>Creates a deduplicated single result.</summary>
    public static BatchPublishResult SingleDeduplicated => new(0, 1, 0, []);
}
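An implementation of PublishBatchAsync would typically fold per-envelope outcomes into a single BatchPublishResult. A hedged sketch of that aggregation, assuming PublishAsync returns false for deduplicated events and throws on failure (error handling deliberately simplified; not taken from this diff):

// Illustrative aggregation loop an IEventPublisher implementation might use.
static async Task<BatchPublishResult> PublishAllAsync(IEventPublisher publisher, IEnumerable<EventEnvelope> envelopes)
{
    int published = 0, deduplicated = 0, failed = 0;
    var errors = new List<string>();
    foreach (var envelope in envelopes)
    {
        try
        {
            if (await publisher.PublishAsync(envelope)) published++;
            else deduplicated++;
        }
        catch (Exception ex)
        {
            failed++;
            errors.Add($"{envelope.EventId}: {ex.Message}");
        }
    }
    return new BatchPublishResult(published, deduplicated, failed, errors);
}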
/// <summary>
|
||||
/// Event publishing options.
|
||||
/// </summary>
|
||||
public sealed record EventPublishOptions(
|
||||
/// <summary>Whether to sign events with DSSE.</summary>
|
||||
bool SignWithDsse,
|
||||
|
||||
/// <summary>Maximum retry attempts for transient failures.</summary>
|
||||
int MaxRetries,
|
||||
|
||||
/// <summary>Base delay between retries.</summary>
|
||||
TimeSpan RetryDelay,
|
||||
|
||||
/// <summary>TTL for idempotency key tracking.</summary>
|
||||
TimeSpan IdempotencyTtl,
|
||||
|
||||
/// <summary>Whether to include provenance metadata.</summary>
|
||||
bool IncludeProvenance,
|
||||
|
||||
/// <summary>Whether to compress large payloads.</summary>
|
||||
bool CompressLargePayloads,
|
||||
|
||||
/// <summary>Threshold for payload compression (bytes).</summary>
|
||||
int CompressionThreshold)
|
||||
{
|
||||
/// <summary>Default publishing options.</summary>
|
||||
public static EventPublishOptions Default => new(
|
||||
SignWithDsse: true,
|
||||
MaxRetries: 3,
|
||||
RetryDelay: TimeSpan.FromSeconds(1),
|
||||
IdempotencyTtl: TimeSpan.FromHours(24),
|
||||
IncludeProvenance: true,
|
||||
CompressLargePayloads: true,
|
||||
CompressionThreshold: 64 * 1024);
|
||||
}

/// <summary>
/// Interface for event signing.
/// </summary>
public interface IEventSigner
{
    /// <summary>Signs an event envelope with DSSE.</summary>
    /// <param name="envelope">The envelope to sign.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The signed envelope as a DSSE payload.</returns>
    Task<string> SignAsync(EventEnvelope envelope, CancellationToken cancellationToken = default);

    /// <summary>Verifies a signed event envelope.</summary>
    /// <param name="signedPayload">The signed DSSE payload.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The verified envelope, or null if verification fails.</returns>
    Task<EventEnvelope?> VerifyAsync(string signedPayload, CancellationToken cancellationToken = default);
}

/// <summary>
/// Interface for idempotency tracking.
/// </summary>
public interface IIdempotencyStore
{
    /// <summary>Tries to mark an idempotency key as processed.</summary>
    /// <param name="key">The idempotency key.</param>
    /// <param name="ttl">TTL for the key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if newly marked; false if already existed.</returns>
    Task<bool> TryMarkAsync(string key, TimeSpan ttl, CancellationToken cancellationToken = default);

    /// <summary>Checks if an idempotency key exists.</summary>
    /// <param name="key">The idempotency key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the key exists.</returns>
    Task<bool> ExistsAsync(string key, CancellationToken cancellationToken = default);

    /// <summary>Removes an idempotency key.</summary>
    /// <param name="key">The idempotency key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task RemoveAsync(string key, CancellationToken cancellationToken = default);
}
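
// Illustrative usage sketch (hypothetical helper, not part of the original commit): the intended
// pattern is to mark the idempotency key first and only publish when the key was newly marked,
// so duplicate deliveries become no-ops.
internal static class IdempotencyUsageExample
{
    public static async Task<bool> SendOnceAsync(
        IIdempotencyStore store,
        INotifierBus bus,
        string channel,
        string idempotencyKey,
        string message,
        CancellationToken cancellationToken = default)
    {
        // TryMarkAsync returns false when the key already exists, i.e. the event was sent before.
        if (!await store.TryMarkAsync(idempotencyKey, TimeSpan.FromHours(24), cancellationToken))
        {
            return false;
        }

        await bus.SendAsync(channel, message, cancellationToken);
        return true;
    }
}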

/// <summary>
/// Interface for the notifier bus transport.
/// </summary>
public interface INotifierBus
{
    /// <summary>Sends a message to the notifier bus.</summary>
    /// <param name="channel">Target channel.</param>
    /// <param name="message">Message payload (JSON or signed DSSE).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task SendAsync(string channel, string message, CancellationToken cancellationToken = default);

    /// <summary>Sends multiple messages to the notifier bus.</summary>
    /// <param name="channel">Target channel.</param>
    /// <param name="messages">Message payloads.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task SendBatchAsync(string channel, IEnumerable<string> messages, CancellationToken cancellationToken = default);
}

/// <summary>
/// Null implementation of event publisher for testing.
/// </summary>
public sealed class NullEventPublisher : IEventPublisher
{
    /// <summary>Singleton instance.</summary>
    public static NullEventPublisher Instance { get; } = new();

    private NullEventPublisher() { }

    public Task<bool> PublishAsync(EventEnvelope envelope, CancellationToken cancellationToken = default)
        => Task.FromResult(true);

    public Task<BatchPublishResult> PublishBatchAsync(IEnumerable<EventEnvelope> envelopes, CancellationToken cancellationToken = default)
    {
        var count = envelopes.Count();
        return Task.FromResult(new BatchPublishResult(count, 0, 0, []));
    }

    public Task<bool> IsPublishedAsync(string idempotencyKey, CancellationToken cancellationToken = default)
        => Task.FromResult(false);
}

/// <summary>
/// In-memory implementation of idempotency store for testing.
/// </summary>
public sealed class InMemoryIdempotencyStore : IIdempotencyStore
{
    private readonly Dictionary<string, DateTimeOffset> _keys = new();
    private readonly object _lock = new();

    public Task<bool> TryMarkAsync(string key, TimeSpan ttl, CancellationToken cancellationToken = default)
    {
        lock (_lock)
        {
            CleanupExpired();
            if (_keys.ContainsKey(key))
                return Task.FromResult(false);

            _keys[key] = DateTimeOffset.UtcNow.Add(ttl);
            return Task.FromResult(true);
        }
    }

    public Task<bool> ExistsAsync(string key, CancellationToken cancellationToken = default)
    {
        lock (_lock)
        {
            CleanupExpired();
            return Task.FromResult(_keys.ContainsKey(key));
        }
    }

    public Task RemoveAsync(string key, CancellationToken cancellationToken = default)
    {
        lock (_lock)
        {
            _keys.Remove(key);
        }
        return Task.CompletedTask;
    }

    private void CleanupExpired()
    {
        var now = DateTimeOffset.UtcNow;
        var expired = _keys.Where(kv => kv.Value <= now).Select(kv => kv.Key).ToList();
        foreach (var key in expired)
        {
            _keys.Remove(key);
        }
    }

    /// <summary>Gets the current key count (for testing).</summary>
    public int Count
    {
        get { lock (_lock) { CleanupExpired(); return _keys.Count; } }
    }

    /// <summary>Clears all keys (for testing).</summary>
    public void Clear()
    {
        lock (_lock) { _keys.Clear(); }
    }
}
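
// Illustrative test sketch (hypothetical, not part of the original commit): demonstrates the
// expected TryMarkAsync semantics of the in-memory store — the first call marks the key, the
// second call within the TTL window is rejected.
internal static class InMemoryIdempotencyStoreExample
{
    public static async Task<bool> FirstCallWinsAsync()
    {
        var store = new InMemoryIdempotencyStore();
        var first = await store.TryMarkAsync("evt-1", TimeSpan.FromMinutes(5));   // true
        var second = await store.TryMarkAsync("evt-1", TimeSpan.FromMinutes(5));  // false
        return first && !second;
    }
}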
@@ -0,0 +1,559 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;

namespace StellaOps.Orchestrator.Core.Domain.Export;

/// <summary>
/// Export job payload containing export-specific parameters.
/// Serialized to JSON and stored in Job.Payload.
/// </summary>
public sealed record ExportJobPayload(
    /// <summary>Export format (e.g., "json", "ndjson", "csv", "spdx", "cyclonedx").</summary>
    string Format,

    /// <summary>Start of time range to export (inclusive).</summary>
    DateTimeOffset? StartTime,

    /// <summary>End of time range to export (exclusive).</summary>
    DateTimeOffset? EndTime,

    /// <summary>Filter by source ID.</summary>
    Guid? SourceId,

    /// <summary>Filter by project ID.</summary>
    string? ProjectId,

    /// <summary>Filter by specific entity IDs.</summary>
    IReadOnlyList<Guid>? EntityIds,

    /// <summary>Maximum entries to export (pagination/limit).</summary>
    int? MaxEntries,

    /// <summary>Whether to include provenance metadata.</summary>
    bool IncludeProvenance,

    /// <summary>Whether to sign the export output.</summary>
    bool SignOutput,

    /// <summary>Compression format (null = none, "gzip", "zstd").</summary>
    string? Compression,

    /// <summary>Destination URI for the export output.</summary>
    string? DestinationUri,

    /// <summary>Callback URL for completion notification.</summary>
    string? CallbackUrl,

    /// <summary>Additional export-specific options.</summary>
    IReadOnlyDictionary<string, string>? Options)
{
    /// <summary>Default export payload with minimal settings.</summary>
    public static ExportJobPayload Default(string format) => new(
        Format: format,
        StartTime: null,
        EndTime: null,
        SourceId: null,
        ProjectId: null,
        EntityIds: null,
        MaxEntries: null,
        IncludeProvenance: true,
        SignOutput: true,
        Compression: null,
        DestinationUri: null,
        CallbackUrl: null,
        Options: null);

    /// <summary>Serializes the payload to JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Computes SHA-256 digest of the payload.</summary>
    public string ComputeDigest()
    {
        var json = ToJson();
        var bytes = Encoding.UTF8.GetBytes(json);
        var hash = SHA256.HashData(bytes);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    /// <summary>Deserializes a payload from JSON. Returns null for invalid JSON.</summary>
    public static ExportJobPayload? FromJson(string json)
    {
        try
        {
            return JsonSerializer.Deserialize<ExportJobPayload>(json, JsonOptions);
        }
        catch (JsonException)
        {
            return null;
        }
    }

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };
}
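
// Illustrative usage sketch (hypothetical, not part of the original commit): builds a payload
// from the Default factory, narrows the time range, and derives the content-addressed digest
// that would be stored alongside the job for determinism checks.
internal static class ExportJobPayloadExample
{
    public static string BuildSbomPayloadDigest(DateTimeOffset from, DateTimeOffset to)
    {
        var payload = ExportJobPayload.Default("cyclonedx") with
        {
            StartTime = from,
            EndTime = to,
            Compression = "gzip"
        };
        return payload.ComputeDigest(); // "sha256:..."
    }
}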

/// <summary>
/// Export job result containing output metadata.
/// </summary>
public sealed record ExportJobResult(
    /// <summary>Output URI where export is stored.</summary>
    string OutputUri,

    /// <summary>SHA-256 digest of the output.</summary>
    string OutputDigest,

    /// <summary>Output size in bytes.</summary>
    long OutputSizeBytes,

    /// <summary>Number of entries exported.</summary>
    int EntryCount,

    /// <summary>Export format used.</summary>
    string Format,

    /// <summary>Compression applied (if any).</summary>
    string? Compression,

    /// <summary>Provenance attestation URI (if signed).</summary>
    string? ProvenanceUri,

    /// <summary>Start of actual exported time range.</summary>
    DateTimeOffset? ActualStartTime,

    /// <summary>End of actual exported time range.</summary>
    DateTimeOffset? ActualEndTime,

    /// <summary>Export generation timestamp.</summary>
    DateTimeOffset GeneratedAt,

    /// <summary>Duration of export operation in seconds.</summary>
    double DurationSeconds)
{
    /// <summary>Serializes the result to JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Deserializes a result from JSON.</summary>
    public static ExportJobResult? FromJson(string json) =>
        JsonSerializer.Deserialize<ExportJobResult>(json, JsonOptions);

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };
}

/// <summary>
/// Export job progress information.
/// </summary>
public sealed record ExportJobProgress(
    /// <summary>Current phase of export.</summary>
    ExportPhase Phase,

    /// <summary>Entries processed so far.</summary>
    int EntriesProcessed,

    /// <summary>Total entries to process (if known).</summary>
    int? TotalEntries,

    /// <summary>Bytes written so far.</summary>
    long BytesWritten,

    /// <summary>Current progress message.</summary>
    string? Message)
{
    /// <summary>Computes progress percentage (0-100).</summary>
    public double? ProgressPercent => TotalEntries > 0
        ? Math.Min(100.0, 100.0 * EntriesProcessed / TotalEntries.Value)
        : null;

    /// <summary>Serializes the progress to JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Deserializes progress from JSON.</summary>
    public static ExportJobProgress? FromJson(string json) =>
        JsonSerializer.Deserialize<ExportJobProgress>(json, JsonOptions);

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };
}

/// <summary>
/// Export job phases.
/// </summary>
public enum ExportPhase
{
    /// <summary>Initializing export.</summary>
    Initializing = 0,

    /// <summary>Querying data.</summary>
    Querying = 1,

    /// <summary>Formatting output.</summary>
    Formatting = 2,

    /// <summary>Compressing output.</summary>
    Compressing = 3,

    /// <summary>Signing/attesting output.</summary>
    Signing = 4,

    /// <summary>Uploading to destination.</summary>
    Uploading = 5,

    /// <summary>Finalizing export.</summary>
    Finalizing = 6,

    /// <summary>Export completed.</summary>
    Completed = 7
}

/// <summary>
/// Distribution metadata for export jobs.
/// Tracks where exports are stored, download URLs, and replication status.
/// </summary>
public sealed record ExportDistribution(
    /// <summary>Primary storage location URI.</summary>
    string PrimaryUri,

    /// <summary>Pre-signed download URL (time-limited).</summary>
    string? DownloadUrl,

    /// <summary>Download URL expiration time.</summary>
    DateTimeOffset? DownloadUrlExpiresAt,

    /// <summary>Storage provider (e.g., "s3", "azure-blob", "gcs", "local").</summary>
    string StorageProvider,

    /// <summary>Storage region/location.</summary>
    string? Region,

    /// <summary>Storage tier (e.g., "hot", "cool", "archive").</summary>
    string StorageTier,

    /// <summary>Replication targets with their URIs.</summary>
    IReadOnlyDictionary<string, string>? Replicas,

    /// <summary>Replication status per target.</summary>
    IReadOnlyDictionary<string, ReplicationStatus>? ReplicationStatus,

    /// <summary>Content type of the export.</summary>
    string ContentType,

    /// <summary>Access control list (principals with access).</summary>
    IReadOnlyList<string>? AccessList,

    /// <summary>Whether export is publicly accessible.</summary>
    bool IsPublic,

    /// <summary>Distribution creation timestamp.</summary>
    DateTimeOffset CreatedAt)
{
    /// <summary>Serializes distribution to JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Deserializes distribution from JSON.</summary>
    public static ExportDistribution? FromJson(string json)
    {
        try
        {
            return JsonSerializer.Deserialize<ExportDistribution>(json, JsonOptions);
        }
        catch (JsonException)
        {
            return null;
        }
    }

    /// <summary>Creates a download URL with expiration.</summary>
    public ExportDistribution WithDownloadUrl(string url, TimeSpan validity) => this with
    {
        DownloadUrl = url,
        DownloadUrlExpiresAt = DateTimeOffset.UtcNow.Add(validity)
    };

    /// <summary>Adds a replication target.</summary>
    public ExportDistribution WithReplica(string target, string uri, ReplicationStatus status)
    {
        var replicas = Replicas is null
            ? new Dictionary<string, string> { [target] = uri }
            : new Dictionary<string, string>(Replicas) { [target] = uri };

        var replicationStatus = ReplicationStatus is null
            ? new Dictionary<string, ReplicationStatus> { [target] = status }
            : new Dictionary<string, ReplicationStatus>(ReplicationStatus) { [target] = status };

        return this with { Replicas = replicas, ReplicationStatus = replicationStatus };
    }

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };
}
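
// Illustrative usage sketch (hypothetical, not part of the original commit): records a replica
// and attaches a time-limited download URL using the record's non-destructive `with` helpers;
// the URIs below are placeholders.
internal static class ExportDistributionExample
{
    public static ExportDistribution Publish(ExportDistribution distribution, string presignedUrl)
        => distribution
            .WithReplica("dr-site", "s3://dr-bucket/exports/run-1.json.gz", ReplicationStatus.Pending)
            .WithDownloadUrl(presignedUrl, TimeSpan.FromHours(1));
}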

/// <summary>
/// Replication status for a distribution target.
/// </summary>
public enum ReplicationStatus
{
    /// <summary>Replication pending.</summary>
    Pending = 0,

    /// <summary>Replication in progress.</summary>
    InProgress = 1,

    /// <summary>Replication completed successfully.</summary>
    Completed = 2,

    /// <summary>Replication failed.</summary>
    Failed = 3,

    /// <summary>Replication skipped.</summary>
    Skipped = 4
}

/// <summary>
/// Retention policy and timestamps for export jobs.
/// Controls when exports are archived, deleted, or need manual action.
/// </summary>
public sealed record ExportRetention(
    /// <summary>Retention policy name.</summary>
    string PolicyName,

    /// <summary>When the export becomes available for download.</summary>
    DateTimeOffset AvailableAt,

    /// <summary>When the export should be moved to archive tier.</summary>
    DateTimeOffset? ArchiveAt,

    /// <summary>When the export should be deleted.</summary>
    DateTimeOffset? ExpiresAt,

    /// <summary>When the export was actually archived.</summary>
    DateTimeOffset? ArchivedAt,

    /// <summary>When the export was actually deleted.</summary>
    DateTimeOffset? DeletedAt,

    /// <summary>Whether legal hold prevents deletion.</summary>
    bool LegalHold,

    /// <summary>Legal hold reason (if applicable).</summary>
    string? LegalHoldReason,

    /// <summary>Whether export requires explicit release before deletion.</summary>
    bool RequiresRelease,

    /// <summary>Who released the export for deletion (if applicable).</summary>
    string? ReleasedBy,

    /// <summary>When export was released for deletion.</summary>
    DateTimeOffset? ReleasedAt,

    /// <summary>Number of times retention was extended.</summary>
    int ExtensionCount,

    /// <summary>Retention metadata (audit trail, etc.).</summary>
    IReadOnlyDictionary<string, string>? Metadata)
{
    /// <summary>Default retention policy names.</summary>
    public static class PolicyNames
    {
        public const string Default = "default";
        public const string Compliance = "compliance";
        public const string Temporary = "temporary";
        public const string LongTerm = "long-term";
        public const string Permanent = "permanent";
    }

    /// <summary>Default retention periods.</summary>
    public static class DefaultPeriods
    {
        public static readonly TimeSpan Temporary = TimeSpan.FromDays(7);
        public static readonly TimeSpan Default = TimeSpan.FromDays(30);
        public static readonly TimeSpan LongTerm = TimeSpan.FromDays(365);
        public static readonly TimeSpan ArchiveDelay = TimeSpan.FromDays(90);
    }

    /// <summary>Creates a default retention policy.</summary>
    public static ExportRetention Default(DateTimeOffset now) => new(
        PolicyName: PolicyNames.Default,
        AvailableAt: now,
        ArchiveAt: now.Add(DefaultPeriods.ArchiveDelay),
        ExpiresAt: now.Add(DefaultPeriods.Default),
        ArchivedAt: null,
        DeletedAt: null,
        LegalHold: false,
        LegalHoldReason: null,
        RequiresRelease: false,
        ReleasedBy: null,
        ReleasedAt: null,
        ExtensionCount: 0,
        Metadata: null);

    /// <summary>Creates a temporary retention policy.</summary>
    public static ExportRetention Temporary(DateTimeOffset now) => new(
        PolicyName: PolicyNames.Temporary,
        AvailableAt: now,
        ArchiveAt: null,
        ExpiresAt: now.Add(DefaultPeriods.Temporary),
        ArchivedAt: null,
        DeletedAt: null,
        LegalHold: false,
        LegalHoldReason: null,
        RequiresRelease: false,
        ReleasedBy: null,
        ReleasedAt: null,
        ExtensionCount: 0,
        Metadata: null);

    /// <summary>Creates a compliance retention policy (requires release).</summary>
    public static ExportRetention Compliance(DateTimeOffset now, TimeSpan minimumRetention) => new(
        PolicyName: PolicyNames.Compliance,
        AvailableAt: now,
        ArchiveAt: now.Add(DefaultPeriods.ArchiveDelay),
        ExpiresAt: now.Add(minimumRetention),
        ArchivedAt: null,
        DeletedAt: null,
        LegalHold: false,
        LegalHoldReason: null,
        RequiresRelease: true,
        ReleasedBy: null,
        ReleasedAt: null,
        ExtensionCount: 0,
        Metadata: null);

    /// <summary>Whether the export is expired.</summary>
    public bool IsExpired => ExpiresAt.HasValue && DateTimeOffset.UtcNow >= ExpiresAt.Value && !LegalHold;

    /// <summary>Whether the export should be archived.</summary>
    public bool ShouldArchive => ArchiveAt.HasValue && DateTimeOffset.UtcNow >= ArchiveAt.Value && !ArchivedAt.HasValue;

    /// <summary>Whether the export can be deleted.</summary>
    public bool CanDelete => IsExpired && (!RequiresRelease || ReleasedAt.HasValue) && !LegalHold;

    /// <summary>Extends the retention period.</summary>
    public ExportRetention ExtendRetention(TimeSpan extension, string? reason = null)
    {
        var metadata = Metadata is null
            ? new Dictionary<string, string>()
            : new Dictionary<string, string>(Metadata);

        metadata[$"extension_{ExtensionCount + 1}_at"] = DateTimeOffset.UtcNow.ToString("o");
        if (reason is not null)
            metadata[$"extension_{ExtensionCount + 1}_reason"] = reason;

        return this with
        {
            ExpiresAt = (ExpiresAt ?? DateTimeOffset.UtcNow).Add(extension),
            ArchiveAt = ArchiveAt?.Add(extension),
            ExtensionCount = ExtensionCount + 1,
            Metadata = metadata
        };
    }

    /// <summary>Places a legal hold on the export.</summary>
    public ExportRetention PlaceLegalHold(string reason) => this with
    {
        LegalHold = true,
        LegalHoldReason = reason
    };

    /// <summary>Releases a legal hold.</summary>
    public ExportRetention ReleaseLegalHold() => this with
    {
        LegalHold = false,
        LegalHoldReason = null
    };

    /// <summary>Releases the export for deletion.</summary>
    public ExportRetention Release(string releasedBy) => this with
    {
        ReleasedBy = releasedBy,
        ReleasedAt = DateTimeOffset.UtcNow
    };

    /// <summary>Marks the export as archived.</summary>
    public ExportRetention MarkArchived() => this with
    {
        ArchivedAt = DateTimeOffset.UtcNow
    };

    /// <summary>Marks the export as deleted.</summary>
    public ExportRetention MarkDeleted() => this with
    {
        DeletedAt = DateTimeOffset.UtcNow
    };

    /// <summary>Serializes retention to JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Deserializes retention from JSON.</summary>
    public static ExportRetention? FromJson(string json)
    {
        try
        {
            return JsonSerializer.Deserialize<ExportRetention>(json, JsonOptions);
        }
        catch (JsonException)
        {
            return null;
        }
    }

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };
}
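
// Illustrative lifecycle sketch (hypothetical, not part of the original commit): a compliance
// export cannot be deleted until it has expired, has been explicitly released, and carries no
// legal hold; extension and release are non-destructive `with`-based updates.
internal static class ExportRetentionExample
{
    public static bool CanDeleteAfterRelease(DateTimeOffset createdAt)
    {
        var retention = ExportRetention.Compliance(createdAt, minimumRetention: TimeSpan.FromDays(365))
            .ExtendRetention(TimeSpan.FromDays(30), "audit in progress")
            .Release("compliance-officer");
        return retention.CanDelete; // false until ExpiresAt has passed
    }
}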

/// <summary>
/// Complete export job state for streaming updates.
/// </summary>
public sealed record ExportJobState(
    /// <summary>Job ID.</summary>
    Guid JobId,

    /// <summary>Export type.</summary>
    string ExportType,

    /// <summary>Current status.</summary>
    string Status,

    /// <summary>Current progress.</summary>
    ExportJobProgress? Progress,

    /// <summary>Job result (when complete).</summary>
    ExportJobResult? Result,

    /// <summary>Distribution metadata (when complete).</summary>
    ExportDistribution? Distribution,

    /// <summary>Retention policy.</summary>
    ExportRetention? Retention,

    /// <summary>Error message (when failed).</summary>
    string? Error,

    /// <summary>State timestamp.</summary>
    DateTimeOffset Timestamp)
{
    /// <summary>Serializes state to JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };
}
@@ -0,0 +1,173 @@
namespace StellaOps.Orchestrator.Core.Domain.Export;

/// <summary>
/// Default policy settings for export jobs.
/// These values are used when creating export job quotas and rate limits.
/// </summary>
public static class ExportJobPolicy
{
    /// <summary>
    /// Default quota settings for export jobs.
    /// Export jobs are typically I/O bound and should be limited to prevent resource exhaustion.
    /// </summary>
    public static class QuotaDefaults
    {
        /// <summary>Maximum concurrent export jobs per tenant.</summary>
        public const int MaxActive = 5;

        /// <summary>Maximum export jobs per hour per tenant.</summary>
        public const int MaxPerHour = 50;

        /// <summary>Token bucket burst capacity.</summary>
        public const int BurstCapacity = 10;

        /// <summary>Token refill rate (tokens per second).</summary>
        public const double RefillRate = 0.5;

        /// <summary>Default priority for export jobs (lower than scan jobs).</summary>
        public const int DefaultPriority = -10;

        /// <summary>Maximum retry attempts for export jobs.</summary>
        public const int MaxAttempts = 3;

        /// <summary>Default lease duration in seconds.</summary>
        public const int DefaultLeaseSeconds = 600; // 10 minutes

        /// <summary>Maximum lease duration in seconds.</summary>
        public const int MaxLeaseSeconds = 3600; // 1 hour

        /// <summary>Heartbeat interval recommendation in seconds.</summary>
        public const int RecommendedHeartbeatInterval = 60;
    }

    /// <summary>
    /// Rate limiting settings for export jobs by type.
    /// Different export types may have different resource requirements.
    /// </summary>
    public static class RateLimits
    {
        /// <summary>Ledger export: moderate rate (database-heavy).</summary>
        public static readonly ExportRateLimit Ledger = new(
            MaxConcurrent: 3,
            MaxPerHour: 30,
            EstimatedDurationSeconds: 120);

        /// <summary>SBOM export: higher rate (typically smaller datasets).</summary>
        public static readonly ExportRateLimit Sbom = new(
            MaxConcurrent: 5,
            MaxPerHour: 100,
            EstimatedDurationSeconds: 30);

        /// <summary>VEX export: similar to SBOM.</summary>
        public static readonly ExportRateLimit Vex = new(
            MaxConcurrent: 5,
            MaxPerHour: 100,
            EstimatedDurationSeconds: 30);

        /// <summary>Scan results export: moderate rate.</summary>
        public static readonly ExportRateLimit ScanResults = new(
            MaxConcurrent: 3,
            MaxPerHour: 50,
            EstimatedDurationSeconds: 60);

        /// <summary>Policy evaluation export: moderate rate.</summary>
        public static readonly ExportRateLimit PolicyEvaluation = new(
            MaxConcurrent: 3,
            MaxPerHour: 50,
            EstimatedDurationSeconds: 60);

        /// <summary>Attestation export: lower rate (cryptographic operations).</summary>
        public static readonly ExportRateLimit Attestation = new(
            MaxConcurrent: 2,
            MaxPerHour: 20,
            EstimatedDurationSeconds: 180);

        /// <summary>Portable bundle export: lowest rate (large bundles).</summary>
        public static readonly ExportRateLimit PortableBundle = new(
            MaxConcurrent: 1,
            MaxPerHour: 10,
            EstimatedDurationSeconds: 600);

        /// <summary>Gets rate limit for a specific export type.</summary>
        public static ExportRateLimit GetForJobType(string jobType) => jobType switch
        {
            ExportJobTypes.Ledger => Ledger,
            ExportJobTypes.Sbom => Sbom,
            ExportJobTypes.Vex => Vex,
            ExportJobTypes.ScanResults => ScanResults,
            ExportJobTypes.PolicyEvaluation => PolicyEvaluation,
            ExportJobTypes.Attestation => Attestation,
            ExportJobTypes.PortableBundle => PortableBundle,
            _ => new ExportRateLimit(MaxConcurrent: 3, MaxPerHour: 30, EstimatedDurationSeconds: 120)
        };
    }

    /// <summary>
    /// Timeout settings for export jobs.
    /// </summary>
    public static class Timeouts
    {
        /// <summary>Maximum time for an export job before it's considered stale.</summary>
        public static readonly TimeSpan MaxJobDuration = TimeSpan.FromHours(2);

        /// <summary>Maximum time to wait for a heartbeat before reclaiming.</summary>
        public static readonly TimeSpan HeartbeatTimeout = TimeSpan.FromMinutes(5);

        /// <summary>Backoff delay after failure before retry.</summary>
        public static readonly TimeSpan RetryBackoff = TimeSpan.FromMinutes(1);

        /// <summary>Maximum backoff delay for exponential retry.</summary>
        public static readonly TimeSpan MaxRetryBackoff = TimeSpan.FromMinutes(30);
    }

    /// <summary>
    /// Creates a default quota for export jobs.
    /// </summary>
    public static Quota CreateDefaultQuota(
        string tenantId,
        string? jobType = null,
        string createdBy = "system")
    {
        var rateLimit = jobType is not null && ExportJobTypes.IsExportJob(jobType)
            ? RateLimits.GetForJobType(jobType)
            : new ExportRateLimit(
                QuotaDefaults.MaxActive,
                QuotaDefaults.MaxPerHour,
                QuotaDefaults.DefaultLeaseSeconds);

        var now = DateTimeOffset.UtcNow;

        return new Quota(
            QuotaId: Guid.NewGuid(),
            TenantId: tenantId,
            JobType: jobType,
            MaxActive: rateLimit.MaxConcurrent,
            MaxPerHour: rateLimit.MaxPerHour,
            BurstCapacity: QuotaDefaults.BurstCapacity,
            RefillRate: QuotaDefaults.RefillRate,
            CurrentTokens: QuotaDefaults.BurstCapacity,
            LastRefillAt: now,
            CurrentActive: 0,
            CurrentHourCount: 0,
            CurrentHourStart: now,
            Paused: false,
            PauseReason: null,
            QuotaTicket: null,
            CreatedAt: now,
            UpdatedAt: now,
            UpdatedBy: createdBy);
    }
}
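
// Illustrative usage sketch (hypothetical, not part of the original commit): the orchestrator
// would look up the per-type rate limit and seed a tenant quota from the policy defaults.
internal static class ExportJobPolicyExample
{
    // GetForJobType(ExportJobTypes.Sbom) would resolve the SBOM limit (5 concurrent, 100/hour);
    // CreateDefaultQuota folds that limit plus the QuotaDefaults into a fresh tenant quota.
    public static Quota SeedSbomQuota(string tenantId)
        => ExportJobPolicy.CreateDefaultQuota(tenantId, ExportJobTypes.Sbom, createdBy: "bootstrap");
}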

/// <summary>
/// Rate limit configuration for an export type.
/// </summary>
public sealed record ExportRateLimit(
    /// <summary>Maximum concurrent jobs of this type.</summary>
    int MaxConcurrent,

    /// <summary>Maximum jobs per hour.</summary>
    int MaxPerHour,

    /// <summary>Estimated duration in seconds (for scheduling hints).</summary>
    int EstimatedDurationSeconds);
@@ -0,0 +1,61 @@
namespace StellaOps.Orchestrator.Core.Domain.Export;

/// <summary>
/// Standard export job type identifiers.
/// Export jobs follow the pattern "export.{target}" where target is the export destination/format.
/// </summary>
public static class ExportJobTypes
{
    /// <summary>Job type prefix for all export jobs.</summary>
    public const string Prefix = "export.";

    /// <summary>Run ledger export (audit trail, immutable snapshots).</summary>
    public const string Ledger = "export.ledger";

    /// <summary>SBOM export (SPDX, CycloneDX formats).</summary>
    public const string Sbom = "export.sbom";

    /// <summary>VEX document export.</summary>
    public const string Vex = "export.vex";

    /// <summary>Scan results export.</summary>
    public const string ScanResults = "export.scan-results";

    /// <summary>Policy evaluation export.</summary>
    public const string PolicyEvaluation = "export.policy-evaluation";

    /// <summary>Attestation bundle export.</summary>
    public const string Attestation = "export.attestation";

    /// <summary>Portable evidence bundle export (for air-gap transfer).</summary>
    public const string PortableBundle = "export.portable-bundle";

    /// <summary>All known export job types.</summary>
    public static readonly IReadOnlyList<string> All =
    [
        Ledger,
        Sbom,
        Vex,
        ScanResults,
        PolicyEvaluation,
        Attestation,
        PortableBundle
    ];

    /// <summary>Checks if a job type is an export job.</summary>
    public static bool IsExportJob(string? jobType) =>
        jobType is not null && jobType.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase);

    /// <summary>Gets the export target from a job type (e.g., "ledger" from "export.ledger").</summary>
    public static string? GetExportTarget(string? jobType)
    {
        if (!IsExportJob(jobType))
        {
            return null;
        }

        return jobType!.Length > Prefix.Length
            ? jobType[Prefix.Length..]
            : null;
    }
}
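
// Illustrative usage sketch (hypothetical, not part of the original commit): routing code can
// combine the prefix check with GetExportTarget to dispatch export jobs by target name.
internal static class ExportJobTypesExample
{
    public static string DescribeJob(string jobType)
        => ExportJobTypes.IsExportJob(jobType)
            ? $"export targeting '{ExportJobTypes.GetExportTarget(jobType)}'" // e.g. "ledger"
            : "not an export job";
}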
@@ -0,0 +1,537 @@
using System.Text.Json;

namespace StellaOps.Orchestrator.Core.Domain.Export;

/// <summary>
/// Represents a scheduled export configuration.
/// Exports can be scheduled to run on a cron pattern.
/// </summary>
public sealed record ExportSchedule(
    /// <summary>Schedule ID.</summary>
    Guid ScheduleId,

    /// <summary>Tenant ID.</summary>
    string TenantId,

    /// <summary>Schedule name for identification.</summary>
    string Name,

    /// <summary>Schedule description.</summary>
    string? Description,

    /// <summary>Export type to execute.</summary>
    string ExportType,

    /// <summary>Cron expression for scheduling (5 or 6 fields).</summary>
    string CronExpression,

    /// <summary>Timezone for cron evaluation (IANA format).</summary>
    string Timezone,

    /// <summary>Whether the schedule is enabled.</summary>
    bool Enabled,

    /// <summary>Export payload template.</summary>
    ExportJobPayload PayloadTemplate,

    /// <summary>Retention policy to apply to generated exports.</summary>
    string RetentionPolicy,

    /// <summary>Project ID filter (optional).</summary>
    string? ProjectId,

    /// <summary>Maximum concurrent exports from this schedule.</summary>
    int MaxConcurrent,

    /// <summary>Whether to skip if previous run is still executing.</summary>
    bool SkipIfRunning,

    /// <summary>Last successful run timestamp.</summary>
    DateTimeOffset? LastRunAt,

    /// <summary>Last run job ID.</summary>
    Guid? LastJobId,

    /// <summary>Last run status.</summary>
    string? LastRunStatus,

    /// <summary>Next scheduled run time.</summary>
    DateTimeOffset? NextRunAt,

    /// <summary>Total runs executed.</summary>
    long TotalRuns,

    /// <summary>Successful runs count.</summary>
    long SuccessfulRuns,

    /// <summary>Failed runs count.</summary>
    long FailedRuns,

    /// <summary>Created timestamp.</summary>
    DateTimeOffset CreatedAt,

    /// <summary>Last updated timestamp.</summary>
    DateTimeOffset UpdatedAt,

    /// <summary>Created by user.</summary>
    string CreatedBy,

    /// <summary>Last updated by user.</summary>
    string UpdatedBy)
{
    /// <summary>Creates a new export schedule.</summary>
    public static ExportSchedule Create(
        string tenantId,
        string name,
        string exportType,
        string cronExpression,
        ExportJobPayload payloadTemplate,
        string createdBy,
        string? description = null,
        string timezone = "UTC",
        string retentionPolicy = "default",
        string? projectId = null,
        int maxConcurrent = 1,
        bool skipIfRunning = true)
    {
        var now = DateTimeOffset.UtcNow;

        return new ExportSchedule(
            ScheduleId: Guid.NewGuid(),
            TenantId: tenantId,
            Name: name,
            Description: description,
            ExportType: exportType,
            CronExpression: cronExpression,
            Timezone: timezone,
            Enabled: true,
            PayloadTemplate: payloadTemplate,
            RetentionPolicy: retentionPolicy,
            ProjectId: projectId,
            MaxConcurrent: maxConcurrent,
            SkipIfRunning: skipIfRunning,
            LastRunAt: null,
            LastJobId: null,
            LastRunStatus: null,
            NextRunAt: null,
            TotalRuns: 0,
            SuccessfulRuns: 0,
            FailedRuns: 0,
            CreatedAt: now,
            UpdatedAt: now,
            CreatedBy: createdBy,
            UpdatedBy: createdBy);
    }

    /// <summary>Success rate as percentage (0-100).</summary>
    public double SuccessRate => TotalRuns > 0
        ? 100.0 * SuccessfulRuns / TotalRuns
        : 0;

    /// <summary>Enables the schedule.</summary>
    public ExportSchedule Enable() => this with
    {
        Enabled = true,
        UpdatedAt = DateTimeOffset.UtcNow
    };

    /// <summary>Disables the schedule.</summary>
    public ExportSchedule Disable() => this with
    {
        Enabled = false,
        UpdatedAt = DateTimeOffset.UtcNow
    };

    /// <summary>Records a successful run.</summary>
    public ExportSchedule RecordSuccess(Guid jobId, DateTimeOffset? nextRun = null) => this with
    {
        LastRunAt = DateTimeOffset.UtcNow,
        LastJobId = jobId,
        LastRunStatus = "completed",
        NextRunAt = nextRun,
        TotalRuns = TotalRuns + 1,
        SuccessfulRuns = SuccessfulRuns + 1,
        UpdatedAt = DateTimeOffset.UtcNow
    };

    /// <summary>Records a failed run.</summary>
    public ExportSchedule RecordFailure(Guid jobId, string? reason = null, DateTimeOffset? nextRun = null) => this with
    {
        LastRunAt = DateTimeOffset.UtcNow,
        LastJobId = jobId,
        LastRunStatus = $"failed: {reason ?? "unknown"}",
        NextRunAt = nextRun,
        TotalRuns = TotalRuns + 1,
        FailedRuns = FailedRuns + 1,
        UpdatedAt = DateTimeOffset.UtcNow
    };

    /// <summary>Updates the next run time.</summary>
    public ExportSchedule WithNextRun(DateTimeOffset nextRun) => this with
    {
        NextRunAt = nextRun,
        UpdatedAt = DateTimeOffset.UtcNow
    };

    /// <summary>Updates the cron expression.</summary>
    public ExportSchedule WithCron(string cronExpression, string updatedBy) => this with
    {
        CronExpression = cronExpression,
        UpdatedAt = DateTimeOffset.UtcNow,
        UpdatedBy = updatedBy
    };

    /// <summary>Updates the payload template.</summary>
    public ExportSchedule WithPayload(ExportJobPayload payload, string updatedBy) => this with
    {
        PayloadTemplate = payload,
        UpdatedAt = DateTimeOffset.UtcNow,
        UpdatedBy = updatedBy
    };
}
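
// Illustrative usage sketch (hypothetical, not part of the original commit): creates a nightly
// SBOM schedule and records the outcome of one run; the counters and NextRunAt update
// immutably via the record helpers. The cron string and creator are placeholders.
internal static class ExportScheduleExample
{
    public static ExportSchedule NightlySbom(string tenantId)
    {
        var schedule = ExportSchedule.Create(
            tenantId,
            name: "nightly-sbom",
            exportType: ExportJobTypes.Sbom,
            cronExpression: "0 1 * * *",
            payloadTemplate: ExportJobPayload.Default("cyclonedx"),
            createdBy: "ops");
        return schedule.RecordSuccess(Guid.NewGuid(), nextRun: DateTimeOffset.UtcNow.AddDays(1));
    }
}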

/// <summary>
/// Configuration for retention pruning.
/// </summary>
public sealed record RetentionPruneConfig(
    /// <summary>Pruning job ID.</summary>
    Guid PruneId,

    /// <summary>Tenant ID (null for global).</summary>
    string? TenantId,

    /// <summary>Export type filter (null for all).</summary>
    string? ExportType,

    /// <summary>Whether pruning is enabled.</summary>
    bool Enabled,

    /// <summary>Cron expression for prune schedule.</summary>
    string CronExpression,

    /// <summary>Maximum exports to prune per run.</summary>
    int BatchSize,

    /// <summary>Whether to archive before deleting.</summary>
    bool ArchiveBeforeDelete,

    /// <summary>Archive storage provider.</summary>
    string? ArchiveProvider,

    /// <summary>Whether to notify on prune completion.</summary>
    bool NotifyOnComplete,

    /// <summary>Notification channel for alerts.</summary>
    string? NotificationChannel,

    /// <summary>Last prune timestamp.</summary>
    DateTimeOffset? LastPruneAt,

    /// <summary>Exports pruned in last run.</summary>
    int LastPruneCount,

    /// <summary>Total exports pruned.</summary>
    long TotalPruned,

    /// <summary>Created timestamp.</summary>
    DateTimeOffset CreatedAt,

    /// <summary>Updated timestamp.</summary>
    DateTimeOffset UpdatedAt)
{
    /// <summary>Default batch size for pruning.</summary>
    public const int DefaultBatchSize = 100;

    /// <summary>Default cron expression (daily at 2 AM).</summary>
    public const string DefaultCronExpression = "0 2 * * *";

    /// <summary>Creates a default prune configuration.</summary>
    public static RetentionPruneConfig Create(
        string? tenantId = null,
        string? exportType = null,
        string? cronExpression = null,
        int batchSize = DefaultBatchSize)
    {
        var now = DateTimeOffset.UtcNow;

        return new RetentionPruneConfig(
            PruneId: Guid.NewGuid(),
            TenantId: tenantId,
            ExportType: exportType,
            Enabled: true,
            CronExpression: cronExpression ?? DefaultCronExpression,
            BatchSize: batchSize,
            ArchiveBeforeDelete: true,
            ArchiveProvider: null,
            NotifyOnComplete: false,
            NotificationChannel: null,
            LastPruneAt: null,
            LastPruneCount: 0,
            TotalPruned: 0,
            CreatedAt: now,
            UpdatedAt: now);
    }

    /// <summary>Records a prune operation.</summary>
    public RetentionPruneConfig RecordPrune(int count) => this with
    {
        LastPruneAt = DateTimeOffset.UtcNow,
        LastPruneCount = count,
        TotalPruned = TotalPruned + count,
        UpdatedAt = DateTimeOffset.UtcNow
    };
}

/// <summary>
/// Export failure alert configuration.
/// </summary>
public sealed record ExportAlertConfig(
    /// <summary>Alert configuration ID.</summary>
    Guid AlertConfigId,

    /// <summary>Tenant ID.</summary>
    string TenantId,

    /// <summary>Alert name.</summary>
    string Name,

    /// <summary>Export type filter (null for all).</summary>
    string? ExportType,

    /// <summary>Whether alerting is enabled.</summary>
    bool Enabled,

    /// <summary>Minimum consecutive failures to trigger.</summary>
    int ConsecutiveFailuresThreshold,

    /// <summary>Failure rate threshold (0-100).</summary>
    double FailureRateThreshold,

    /// <summary>Time window for failure rate calculation.</summary>
    TimeSpan FailureRateWindow,

    /// <summary>Alert severity.</summary>
    ExportAlertSeverity Severity,

    /// <summary>Notification channels (comma-separated).</summary>
    string NotificationChannels,

    /// <summary>Alert cooldown period.</summary>
    TimeSpan Cooldown,

    /// <summary>Last alert timestamp.</summary>
    DateTimeOffset? LastAlertAt,

    /// <summary>Total alerts triggered.</summary>
    long TotalAlerts,

    /// <summary>Created timestamp.</summary>
    DateTimeOffset CreatedAt,

    /// <summary>Updated timestamp.</summary>
    DateTimeOffset UpdatedAt)
{
    /// <summary>Creates a default alert configuration.</summary>
    public static ExportAlertConfig Create(
        string tenantId,
        string name,
        string? exportType = null,
        int consecutiveFailuresThreshold = 3,
        double failureRateThreshold = 50.0,
        ExportAlertSeverity severity = ExportAlertSeverity.Warning)
    {
        var now = DateTimeOffset.UtcNow;

        return new ExportAlertConfig(
            AlertConfigId: Guid.NewGuid(),
            TenantId: tenantId,
            Name: name,
            ExportType: exportType,
            Enabled: true,
            ConsecutiveFailuresThreshold: consecutiveFailuresThreshold,
            FailureRateThreshold: failureRateThreshold,
            FailureRateWindow: TimeSpan.FromHours(1),
            Severity: severity,
            NotificationChannels: "email",
            Cooldown: TimeSpan.FromMinutes(15),
            LastAlertAt: null,
            TotalAlerts: 0,
            CreatedAt: now,
            UpdatedAt: now);
    }

    /// <summary>Whether an alert can be triggered (respects cooldown).</summary>
    public bool CanAlert => !LastAlertAt.HasValue ||
        DateTimeOffset.UtcNow >= LastAlertAt.Value.Add(Cooldown);

    /// <summary>Records an alert.</summary>
    public ExportAlertConfig RecordAlert() => this with
    {
        LastAlertAt = DateTimeOffset.UtcNow,
        TotalAlerts = TotalAlerts + 1,
        UpdatedAt = DateTimeOffset.UtcNow
    };
}
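
// Illustrative usage sketch (hypothetical, not part of the original commit): the cooldown gate
// is checked before emitting a notification, and RecordAlert advances LastAlertAt so repeats
// inside the cooldown window are suppressed.
internal static class ExportAlertConfigExample
{
    public static ExportAlertConfig MaybeAlert(ExportAlertConfig config, Action notify)
    {
        if (!config.Enabled || !config.CanAlert)
        {
            return config;
        }

        notify();
        return config.RecordAlert();
    }
}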

/// <summary>
/// Export alert severity levels.
/// </summary>
public enum ExportAlertSeverity
{
    /// <summary>Informational.</summary>
    Info = 0,

    /// <summary>Warning - attention needed.</summary>
    Warning = 1,

    /// <summary>Error - action required.</summary>
    Error = 2,

    /// <summary>Critical - immediate action.</summary>
    Critical = 3
}

/// <summary>
/// Export failure alert instance.
/// </summary>
public sealed record ExportAlert(
    /// <summary>Alert ID.</summary>
    Guid AlertId,

    /// <summary>Alert configuration ID.</summary>
    Guid AlertConfigId,

    /// <summary>Tenant ID.</summary>
    string TenantId,

    /// <summary>Export type.</summary>
    string ExportType,

    /// <summary>Alert severity.</summary>
    ExportAlertSeverity Severity,

    /// <summary>Alert message.</summary>
    string Message,

    /// <summary>Failed job IDs.</summary>
    IReadOnlyList<Guid> FailedJobIds,

    /// <summary>Consecutive failure count.</summary>
    int ConsecutiveFailures,

    /// <summary>Current failure rate.</summary>
    double FailureRate,

    /// <summary>Alert timestamp.</summary>
    DateTimeOffset TriggeredAt,

    /// <summary>Acknowledged timestamp.</summary>
    DateTimeOffset? AcknowledgedAt,

    /// <summary>Acknowledged by user.</summary>
    string? AcknowledgedBy,

    /// <summary>Resolved timestamp.</summary>
    DateTimeOffset? ResolvedAt,

    /// <summary>Resolution notes.</summary>
    string? ResolutionNotes)
{
    /// <summary>Creates a new alert for consecutive failures.</summary>
    public static ExportAlert CreateForConsecutiveFailures(
        Guid alertConfigId,
        string tenantId,
        string exportType,
        ExportAlertSeverity severity,
        IReadOnlyList<Guid> failedJobIds,
        int consecutiveFailures)
    {
        return new ExportAlert(
            AlertId: Guid.NewGuid(),
            AlertConfigId: alertConfigId,
            TenantId: tenantId,
            ExportType: exportType,
            Severity: severity,
            Message: $"Export job {exportType} has failed {consecutiveFailures} consecutive times",
            FailedJobIds: failedJobIds,
            ConsecutiveFailures: consecutiveFailures,
            FailureRate: 0,
            TriggeredAt: DateTimeOffset.UtcNow,
            AcknowledgedAt: null,
            AcknowledgedBy: null,
            ResolvedAt: null,
            ResolutionNotes: null);
    }

    /// <summary>Creates a new alert for high failure rate.</summary>
    public static ExportAlert CreateForHighFailureRate(
        Guid alertConfigId,
        string tenantId,
        string exportType,
        ExportAlertSeverity severity,
        double failureRate,
        IReadOnlyList<Guid> recentFailedJobIds)
    {
        return new ExportAlert(
            AlertId: Guid.NewGuid(),
            AlertConfigId: alertConfigId,
            TenantId: tenantId,
            ExportType: exportType,
            Severity: severity,
            Message: $"Export job {exportType} failure rate is {failureRate:F1}%",
            FailedJobIds: recentFailedJobIds,
            ConsecutiveFailures: 0,
            FailureRate: failureRate,
            TriggeredAt: DateTimeOffset.UtcNow,
            AcknowledgedAt: null,
            AcknowledgedBy: null,
            ResolvedAt: null,
            ResolutionNotes: null);
    }

    /// <summary>Acknowledges the alert.</summary>
    public ExportAlert Acknowledge(string acknowledgedBy) => this with
    {
        AcknowledgedAt = DateTimeOffset.UtcNow,
        AcknowledgedBy = acknowledgedBy
    };

    /// <summary>Resolves the alert.</summary>
    public ExportAlert Resolve(string? notes = null) => this with
    {
        ResolvedAt = DateTimeOffset.UtcNow,
        ResolutionNotes = notes
    };

    /// <summary>Whether the alert is active (not resolved).</summary>
    public bool IsActive => ResolvedAt is null;
}

/// <summary>
/// Result of a retention prune operation.
/// </summary>
public sealed record RetentionPruneResult(
    /// <summary>Number of exports archived.</summary>
    int ArchivedCount,

    /// <summary>Number of exports deleted.</summary>
    int DeletedCount,

    /// <summary>Number of exports skipped (legal hold, etc.).</summary>
    int SkippedCount,

    /// <summary>Errors encountered.</summary>
    IReadOnlyList<string> Errors,

    /// <summary>Duration of prune operation.</summary>
    TimeSpan Duration)
{
    /// <summary>Total exports processed.</summary>
    public int TotalProcessed => ArchivedCount + DeletedCount + SkippedCount;

    /// <summary>Whether any errors occurred.</summary>
    public bool HasErrors => Errors.Count > 0;

    /// <summary>Empty result.</summary>
    public static RetentionPruneResult Empty => new(0, 0, 0, [], TimeSpan.Zero);
}
@@ -0,0 +1,180 @@
namespace StellaOps.Orchestrator.Core.Domain;

/// <summary>
/// Represents an Authority pack execution.
/// Pack runs execute policy automation scripts with log collection and artifact production.
/// </summary>
public sealed record PackRun(
    /// <summary>Unique pack run identifier.</summary>
    Guid PackRunId,

    /// <summary>Tenant owning this pack run.</summary>
    string TenantId,

    /// <summary>Optional project scope within tenant.</summary>
    string? ProjectId,

    /// <summary>Authority pack ID being executed.</summary>
    string PackId,

    /// <summary>Pack version (e.g., "1.2.3", "latest").</summary>
    string PackVersion,

    /// <summary>Current pack run status.</summary>
    PackRunStatus Status,

    /// <summary>Priority (higher = more urgent). Default 0.</summary>
    int Priority,

    /// <summary>Current attempt number (1-based).</summary>
    int Attempt,

    /// <summary>Maximum retry attempts.</summary>
    int MaxAttempts,

    /// <summary>Pack input parameters JSON.</summary>
    string Parameters,

    /// <summary>SHA-256 digest of the parameters for determinism verification.</summary>
    string ParametersDigest,

    /// <summary>Idempotency key for deduplication.</summary>
    string IdempotencyKey,

    /// <summary>Correlation ID for distributed tracing.</summary>
    string? CorrelationId,

    /// <summary>Current lease ID (if leased to a task runner).</summary>
    Guid? LeaseId,

    /// <summary>Task runner executing this pack run.</summary>
    string? TaskRunnerId,

    /// <summary>Lease expiration time.</summary>
    DateTimeOffset? LeaseUntil,

    /// <summary>When the pack run was created.</summary>
    DateTimeOffset CreatedAt,

    /// <summary>When the pack run was scheduled (quota cleared).</summary>
    DateTimeOffset? ScheduledAt,

    /// <summary>When the pack run was leased to a task runner.</summary>
    DateTimeOffset? LeasedAt,

    /// <summary>When the pack run started executing.</summary>
    DateTimeOffset? StartedAt,

    /// <summary>When the pack run completed (terminal state).</summary>
    DateTimeOffset? CompletedAt,

    /// <summary>Earliest time the pack run can be scheduled (for backoff).</summary>
    DateTimeOffset? NotBefore,

    /// <summary>Terminal status reason (failure message, cancel reason, etc.).</summary>
    string? Reason,

    /// <summary>Exit code from pack execution (null if not completed).</summary>
    int? ExitCode,

    /// <summary>Duration of pack execution in milliseconds.</summary>
    long? DurationMs,

    /// <summary>Actor who initiated the pack run.</summary>
    string CreatedBy,

    /// <summary>Optional metadata JSON blob (e.g., trigger info, source context).</summary>
    string? Metadata)
{
    /// <summary>
    /// Creates a new pack run in pending status.
    /// </summary>
    public static PackRun Create(
        Guid packRunId,
        string tenantId,
        string? projectId,
        string packId,
        string packVersion,
        string parameters,
        string parametersDigest,
        string idempotencyKey,
        string? correlationId,
        string createdBy,
        int priority = 0,
        int maxAttempts = 3,
        string? metadata = null,
        DateTimeOffset? createdAt = null)
    {
        return new PackRun(
            PackRunId: packRunId,
            TenantId: tenantId,
            ProjectId: projectId,
            PackId: packId,
            PackVersion: packVersion,
            Status: PackRunStatus.Pending,
            Priority: priority,
            Attempt: 1,
            MaxAttempts: maxAttempts,
            Parameters: parameters,
            ParametersDigest: parametersDigest,
            IdempotencyKey: idempotencyKey,
            CorrelationId: correlationId,
            LeaseId: null,
            TaskRunnerId: null,
            LeaseUntil: null,
            CreatedAt: createdAt ?? DateTimeOffset.UtcNow,
            ScheduledAt: null,
            LeasedAt: null,
            StartedAt: null,
            CompletedAt: null,
            NotBefore: null,
            Reason: null,
            ExitCode: null,
            DurationMs: null,
            CreatedBy: createdBy,
            Metadata: metadata);
    }

    /// <summary>
    /// Checks if the pack run is in a terminal state.
    /// </summary>
    public bool IsTerminal => Status is PackRunStatus.Succeeded or PackRunStatus.Failed or PackRunStatus.Canceled or PackRunStatus.TimedOut;

    /// <summary>
    /// Checks if the pack run can be retried.
    /// </summary>
    public bool CanRetry => Attempt < MaxAttempts && Status == PackRunStatus.Failed;
}
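
// Illustrative usage sketch (hypothetical, not part of the original commit): a newly created
// pack run starts in Pending, is not terminal, and becomes retryable only after a failure while
// attempts remain. The pack ID, version, and creator below are placeholders.
internal static class PackRunExample
{
    public static PackRun NewRun(string tenantId, string parametersJson, string parametersDigest)
        => PackRun.Create(
            Guid.NewGuid(),
            tenantId,
            projectId: null,
            packId: "authority/compliance-checks",
            packVersion: "1.2.3",
            parameters: parametersJson,
            parametersDigest: parametersDigest,
            idempotencyKey: $"{tenantId}:{parametersDigest}",
            correlationId: null,
            createdBy: "scheduler"); // Status == PackRunStatus.Pending, IsTerminal == false
}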

/// <summary>
/// Pack run lifecycle states.
/// Transitions follow the state machine:
/// Pending → Scheduled → Leased → Running → (Succeeded | Failed | Canceled | TimedOut)
/// Failed pack runs may transition to Pending via retry.
/// </summary>
public enum PackRunStatus
{
    /// <summary>Pack run created but not yet scheduled (e.g., quota exceeded).</summary>
    Pending = 0,

    /// <summary>Pack run scheduled and awaiting task runner lease.</summary>
    Scheduled = 1,

    /// <summary>Pack run leased to a task runner.</summary>
    Leased = 2,

    /// <summary>Pack run is executing (received start signal from runner).</summary>
    Running = 3,

    /// <summary>Pack run completed successfully (exit code 0).</summary>
    Succeeded = 4,

    /// <summary>Pack run failed (non-zero exit or execution error).</summary>
    Failed = 5,

    /// <summary>Pack run canceled by operator or system.</summary>
    Canceled = 6,

    /// <summary>Pack run timed out (lease expired without completion).</summary>
    TimedOut = 7
}
@@ -0,0 +1,191 @@
namespace StellaOps.Orchestrator.Core.Domain;

/// <summary>
/// Represents a log entry from a pack run execution.
/// Log entries are append-only and ordered by sequence number within a pack run.
/// </summary>
public sealed record PackRunLog(
    /// <summary>Unique log entry identifier.</summary>
    Guid LogId,

    /// <summary>Tenant owning this log entry.</summary>
    string TenantId,

    /// <summary>Pack run this log belongs to.</summary>
    Guid PackRunId,

    /// <summary>Sequence number within the pack run (0-based, monotonically increasing).</summary>
    long Sequence,

    /// <summary>Log level (info, warn, error, debug, trace).</summary>
    LogLevel Level,

    /// <summary>Log source (e.g., "stdout", "stderr", "system", "pack").</summary>
    string Source,

    /// <summary>Log message content.</summary>
    string Message,

    /// <summary>When the log entry was created.</summary>
    DateTimeOffset Timestamp,

    /// <summary>Optional structured data JSON (e.g., key-value pairs, metrics).</summary>
    string? Data)
{
    /// <summary>
    /// Creates a new log entry.
    /// </summary>
    public static PackRunLog Create(
        Guid packRunId,
        string tenantId,
        long sequence,
        LogLevel level,
        string source,
        string message,
        string? data = null,
        DateTimeOffset? timestamp = null)
    {
        return new PackRunLog(
            LogId: Guid.NewGuid(),
            TenantId: tenantId,
            PackRunId: packRunId,
            Sequence: sequence,
            Level: level,
            Source: source,
            Message: message,
            Timestamp: timestamp ?? DateTimeOffset.UtcNow,
            Data: data);
    }

    /// <summary>
    /// Creates an info-level stdout log entry.
    /// </summary>
    public static PackRunLog Stdout(
        Guid packRunId,
        string tenantId,
        long sequence,
        string message,
        DateTimeOffset? timestamp = null)
    {
        return Create(packRunId, tenantId, sequence, LogLevel.Info, "stdout", message, null, timestamp);
    }

    /// <summary>
    /// Creates a warn-level stderr log entry.
    /// </summary>
    public static PackRunLog Stderr(
        Guid packRunId,
        string tenantId,
        long sequence,
        string message,
        DateTimeOffset? timestamp = null)
    {
        return Create(packRunId, tenantId, sequence, LogLevel.Warn, "stderr", message, null, timestamp);
    }

    /// <summary>
    /// Creates a system-level log entry (lifecycle events).
    /// </summary>
    public static PackRunLog System(
        Guid packRunId,
        string tenantId,
        long sequence,
        LogLevel level,
        string message,
        string? data = null,
        DateTimeOffset? timestamp = null)
    {
        return Create(packRunId, tenantId, sequence, level, "system", message, data, timestamp);
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Log levels for pack run logs.
|
||||
/// </summary>
|
||||
public enum LogLevel
|
||||
{
|
||||
/// <summary>Trace-level logging (most verbose).</summary>
|
||||
Trace = 0,
|
||||
|
||||
/// <summary>Debug-level logging.</summary>
|
||||
Debug = 1,
|
||||
|
||||
/// <summary>Informational messages (default for stdout).</summary>
|
||||
Info = 2,
|
||||
|
||||
/// <summary>Warning messages (default for stderr).</summary>
|
||||
Warn = 3,
|
||||
|
||||
/// <summary>Error messages.</summary>
|
||||
Error = 4,
|
||||
|
||||
/// <summary>Fatal/critical errors.</summary>
|
||||
Fatal = 5
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a batch of log entries for efficient streaming/storage.
|
||||
/// </summary>
|
||||
public sealed record PackRunLogBatch(
|
||||
/// <summary>Pack run ID these logs belong to.</summary>
|
||||
Guid PackRunId,
|
||||
|
||||
/// <summary>Tenant owning these logs.</summary>
|
||||
string TenantId,
|
||||
|
||||
/// <summary>Starting sequence number of this batch.</summary>
|
||||
long StartSequence,
|
||||
|
||||
/// <summary>Log entries in this batch.</summary>
|
||||
IReadOnlyList<PackRunLog> Logs)
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the next expected sequence number after this batch.
|
||||
/// </summary>
|
||||
public long NextSequence => StartSequence + Logs.Count;
|
||||
|
||||
/// <summary>
|
||||
/// Creates a batch from a list of logs.
|
||||
/// </summary>
|
||||
public static PackRunLogBatch FromLogs(Guid packRunId, string tenantId, IReadOnlyList<PackRunLog> logs)
|
||||
{
|
||||
if (logs.Count == 0)
|
||||
return new PackRunLogBatch(packRunId, tenantId, 0, logs);
|
||||
|
||||
return new PackRunLogBatch(packRunId, tenantId, logs[0].Sequence, logs);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a log cursor for resumable streaming.
|
||||
/// </summary>
|
||||
public sealed record PackRunLogCursor(
|
||||
/// <summary>Pack run ID.</summary>
|
||||
Guid PackRunId,
|
||||
|
||||
/// <summary>Last seen sequence number.</summary>
|
||||
long LastSequence,
|
||||
|
||||
/// <summary>Whether we've reached the end of current logs.</summary>
|
||||
bool IsComplete)
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates a cursor starting from the beginning.
|
||||
/// </summary>
|
||||
public static PackRunLogCursor Start(Guid packRunId) => new(packRunId, -1, false);
|
||||
|
||||
/// <summary>
|
||||
/// Creates a cursor for resuming from a specific sequence.
|
||||
/// </summary>
|
||||
public static PackRunLogCursor Resume(Guid packRunId, long lastSequence) => new(packRunId, lastSequence, false);
|
||||
|
||||
/// <summary>
|
||||
/// Creates a completed cursor.
|
||||
/// </summary>
|
||||
public PackRunLogCursor Complete() => this with { IsComplete = true };
|
||||
|
||||
/// <summary>
|
||||
/// Advances the cursor to a new sequence.
|
||||
/// </summary>
|
||||
public PackRunLogCursor Advance(long newSequence) => this with { LastSequence = newSequence };
|
||||
}
|
||||
@@ -0,0 +1,341 @@
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
using StellaOps.Orchestrator.Core.Domain.Export;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Service for managing export jobs.
|
||||
/// Provides high-level operations for creating, scheduling, and tracking export jobs.
|
||||
/// </summary>
|
||||
public interface IExportJobService
|
||||
{
|
||||
/// <summary>Creates a new export job.</summary>
|
||||
Task<Job> CreateExportJobAsync(
|
||||
string tenantId,
|
||||
string exportType,
|
||||
ExportJobPayload payload,
|
||||
string createdBy,
|
||||
string? projectId = null,
|
||||
string? correlationId = null,
|
||||
int? priority = null,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>Gets an export job by ID.</summary>
|
||||
Task<Job?> GetExportJobAsync(
|
||||
string tenantId,
|
||||
Guid jobId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>Lists export jobs with optional filters.</summary>
|
||||
Task<IReadOnlyList<Job>> ListExportJobsAsync(
|
||||
string tenantId,
|
||||
string? exportType = null,
|
||||
JobStatus? status = null,
|
||||
string? projectId = null,
|
||||
DateTimeOffset? createdAfter = null,
|
||||
DateTimeOffset? createdBefore = null,
|
||||
int limit = 50,
|
||||
int offset = 0,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>Cancels an export job.</summary>
|
||||
Task<bool> CancelExportJobAsync(
|
||||
string tenantId,
|
||||
Guid jobId,
|
||||
string reason,
|
||||
string canceledBy,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>Gets the quota status for export jobs.</summary>
|
||||
Task<ExportQuotaStatus> GetQuotaStatusAsync(
|
||||
string tenantId,
|
||||
string? exportType = null,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>Ensures quota exists for an export type, creating with defaults if needed.</summary>
|
||||
Task<Quota> EnsureQuotaAsync(
|
||||
string tenantId,
|
||||
string exportType,
|
||||
string createdBy,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Export quota status information.
|
||||
/// </summary>
|
||||
public sealed record ExportQuotaStatus(
|
||||
/// <summary>Maximum concurrent active jobs.</summary>
|
||||
int MaxActive,
|
||||
|
||||
/// <summary>Current active jobs.</summary>
|
||||
int CurrentActive,
|
||||
|
||||
/// <summary>Maximum jobs per hour.</summary>
|
||||
int MaxPerHour,
|
||||
|
||||
/// <summary>Current hour job count.</summary>
|
||||
int CurrentHourCount,
|
||||
|
||||
/// <summary>Available tokens in bucket.</summary>
|
||||
double AvailableTokens,
|
||||
|
||||
/// <summary>Whether quota is paused.</summary>
|
||||
bool Paused,
|
||||
|
||||
/// <summary>Reason for pause (if paused).</summary>
|
||||
string? PauseReason,
|
||||
|
||||
/// <summary>Whether more jobs can be created.</summary>
|
||||
bool CanCreateJob,
|
||||
|
||||
/// <summary>Estimated wait time if quota exhausted.</summary>
|
||||
TimeSpan? EstimatedWaitTime);
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of the export job service.
|
||||
/// </summary>
|
||||
public sealed class ExportJobService : IExportJobService
|
||||
{
|
||||
private readonly IJobRepository _jobRepository;
|
||||
private readonly IQuotaRepository _quotaRepository;
|
||||
|
||||
public ExportJobService(
|
||||
IJobRepository jobRepository,
|
||||
IQuotaRepository quotaRepository)
|
||||
{
|
||||
_jobRepository = jobRepository;
|
||||
_quotaRepository = quotaRepository;
|
||||
}
|
||||
|
||||
public async Task<Job> CreateExportJobAsync(
|
||||
string tenantId,
|
||||
string exportType,
|
||||
ExportJobPayload payload,
|
||||
string createdBy,
|
||||
string? projectId = null,
|
||||
string? correlationId = null,
|
||||
int? priority = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(exportType);
|
||||
ArgumentNullException.ThrowIfNull(payload);
|
||||
|
||||
if (!ExportJobTypes.IsExportJob(exportType))
|
||||
{
|
||||
throw new ArgumentException($"Invalid export job type: {exportType}", nameof(exportType));
|
||||
}
|
||||
|
||||
var payloadJson = payload.ToJson();
|
||||
var payloadDigest = payload.ComputeDigest();
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var job = new Job(
|
||||
JobId: Guid.NewGuid(),
|
||||
TenantId: tenantId,
|
||||
ProjectId: projectId,
|
||||
RunId: null,
|
||||
JobType: exportType,
|
||||
Status: JobStatus.Pending,
|
||||
Priority: priority ?? ExportJobPolicy.QuotaDefaults.DefaultPriority,
|
||||
Attempt: 1,
|
||||
MaxAttempts: ExportJobPolicy.QuotaDefaults.MaxAttempts,
|
||||
PayloadDigest: payloadDigest,
|
||||
Payload: payloadJson,
|
||||
IdempotencyKey: $"{tenantId}:{exportType}:{payloadDigest}:{now.Ticks}",
|
||||
CorrelationId: correlationId,
|
||||
LeaseId: null,
|
||||
WorkerId: null,
|
||||
TaskRunnerId: null,
|
||||
LeaseUntil: null,
|
||||
CreatedAt: now,
|
||||
ScheduledAt: null,
|
||||
LeasedAt: null,
|
||||
CompletedAt: null,
|
||||
NotBefore: null,
|
||||
Reason: null,
|
||||
ReplayOf: null,
|
||||
CreatedBy: createdBy);
|
||||
|
||||
await _jobRepository.CreateAsync(job, cancellationToken);
|
||||
|
||||
return job;
|
||||
}
|
||||
|
||||
public async Task<Job?> GetExportJobAsync(
|
||||
string tenantId,
|
||||
Guid jobId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var job = await _jobRepository.GetAsync(tenantId, jobId, cancellationToken);
|
||||
|
||||
if (job is not null && !ExportJobTypes.IsExportJob(job.JobType))
|
||||
{
|
||||
return null; // Not an export job
|
||||
}
|
||||
|
||||
return job;
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<Job>> ListExportJobsAsync(
|
||||
string tenantId,
|
||||
string? exportType = null,
|
||||
JobStatus? status = null,
|
||||
string? projectId = null,
|
||||
DateTimeOffset? createdAfter = null,
|
||||
DateTimeOffset? createdBefore = null,
|
||||
int limit = 50,
|
||||
int offset = 0,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
// If no specific export type, use the prefix to filter all export jobs
|
||||
var jobTypeFilter = exportType ?? ExportJobTypes.Prefix;
|
||||
|
||||
var jobs = await _jobRepository.ListAsync(
|
||||
tenantId,
|
||||
status,
|
||||
jobTypeFilter,
|
||||
projectId,
|
||||
createdAfter,
|
||||
createdBefore,
|
||||
limit,
|
||||
offset,
|
||||
cancellationToken);
|
||||
|
||||
// Additional filter for export jobs only (in case repository doesn't support prefix matching)
|
||||
return jobs.Where(j => ExportJobTypes.IsExportJob(j.JobType)).ToList();
|
||||
}
|
||||
|
||||
public async Task<bool> CancelExportJobAsync(
|
||||
string tenantId,
|
||||
Guid jobId,
|
||||
string reason,
|
||||
string canceledBy,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var job = await GetExportJobAsync(tenantId, jobId, cancellationToken);
|
||||
if (job is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Can only cancel pending or leased jobs
|
||||
if (job.Status != JobStatus.Pending && job.Status != JobStatus.Leased)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
var success = await _jobRepository.CancelAsync(tenantId, jobId, reason, canceledBy, cancellationToken);
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
public async Task<ExportQuotaStatus> GetQuotaStatusAsync(
|
||||
string tenantId,
|
||||
string? exportType = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var quota = await _quotaRepository.GetByTenantAndJobTypeAsync(
|
||||
tenantId,
|
||||
exportType,
|
||||
cancellationToken);
|
||||
|
||||
if (quota is null)
|
||||
{
|
||||
// No quota configured - return defaults (unlimited)
|
||||
return new ExportQuotaStatus(
|
||||
MaxActive: int.MaxValue,
|
||||
CurrentActive: 0,
|
||||
MaxPerHour: int.MaxValue,
|
||||
CurrentHourCount: 0,
|
||||
AvailableTokens: double.MaxValue,
|
||||
Paused: false,
|
||||
PauseReason: null,
|
||||
CanCreateJob: true,
|
||||
EstimatedWaitTime: null);
|
||||
}
|
||||
|
||||
var canCreate = !quota.Paused
|
||||
&& quota.CurrentActive < quota.MaxActive
|
||||
&& quota.CurrentTokens >= 1.0;
|
||||
|
||||
TimeSpan? waitTime = null;
|
||||
if (!canCreate && !quota.Paused)
|
||||
{
|
||||
if (quota.CurrentActive >= quota.MaxActive)
|
||||
{
|
||||
// Estimate based on typical job duration
|
||||
var rateLimit = ExportJobPolicy.RateLimits.GetForJobType(exportType ?? ExportJobTypes.Ledger);
|
||||
waitTime = TimeSpan.FromSeconds(rateLimit.EstimatedDurationSeconds);
|
||||
}
|
||||
else if (quota.CurrentTokens < 1.0)
|
||||
{
|
||||
// Estimate based on refill rate
|
||||
var tokensNeeded = 1.0 - quota.CurrentTokens;
|
||||
waitTime = TimeSpan.FromSeconds(tokensNeeded / quota.RefillRate);
|
||||
}
|
||||
}
|
||||
|
||||
return new ExportQuotaStatus(
|
||||
MaxActive: quota.MaxActive,
|
||||
CurrentActive: quota.CurrentActive,
|
||||
MaxPerHour: quota.MaxPerHour,
|
||||
CurrentHourCount: quota.CurrentHourCount,
|
||||
AvailableTokens: quota.CurrentTokens,
|
||||
Paused: quota.Paused,
|
||||
PauseReason: quota.PauseReason,
|
||||
CanCreateJob: canCreate,
|
||||
EstimatedWaitTime: waitTime);
|
||||
}
|
||||
|
||||
public async Task<Quota> EnsureQuotaAsync(
|
||||
string tenantId,
|
||||
string exportType,
|
||||
string createdBy,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var existing = await _quotaRepository.GetByTenantAndJobTypeAsync(
|
||||
tenantId,
|
||||
exportType,
|
||||
cancellationToken);
|
||||
|
||||
if (existing is not null)
|
||||
{
|
||||
return existing;
|
||||
}
|
||||
|
||||
var quota = ExportJobPolicy.CreateDefaultQuota(tenantId, exportType, createdBy);
|
||||
await _quotaRepository.CreateAsync(quota, cancellationToken);
|
||||
|
||||
return quota;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Job repository interface extensions for export jobs.
|
||||
/// </summary>
|
||||
public interface IJobRepository
|
||||
{
|
||||
Task CreateAsync(Job job, CancellationToken cancellationToken);
|
||||
Task<Job?> GetAsync(string tenantId, Guid jobId, CancellationToken cancellationToken);
|
||||
Task<IReadOnlyList<Job>> ListAsync(
|
||||
string tenantId,
|
||||
JobStatus? status,
|
||||
string? jobType,
|
||||
string? projectId,
|
||||
DateTimeOffset? createdAfter,
|
||||
DateTimeOffset? createdBefore,
|
||||
int limit,
|
||||
int offset,
|
||||
CancellationToken cancellationToken);
|
||||
Task<bool> CancelAsync(string tenantId, Guid jobId, string reason, string canceledBy, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Quota repository interface.
|
||||
/// </summary>
|
||||
public interface IQuotaRepository
|
||||
{
|
||||
Task<Quota?> GetByTenantAndJobTypeAsync(string tenantId, string? jobType, CancellationToken cancellationToken);
|
||||
Task CreateAsync(Quota quota, CancellationToken cancellationToken);
|
||||
}
|
||||
@@ -0,0 +1,286 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
|
||||
namespace StellaOps.Orchestrator.Infrastructure.Events;
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of event publisher with idempotency and retries.
|
||||
/// </summary>
|
||||
public sealed class OrchestratorEventPublisher : IEventPublisher
|
||||
{
|
||||
private readonly IIdempotencyStore _idempotencyStore;
|
||||
private readonly INotifierBus _notifierBus;
|
||||
private readonly IEventSigner? _eventSigner;
|
||||
private readonly EventPublishOptions _options;
|
||||
private readonly ILogger<OrchestratorEventPublisher> _logger;
|
||||
|
||||
public OrchestratorEventPublisher(
|
||||
IIdempotencyStore idempotencyStore,
|
||||
INotifierBus notifierBus,
|
||||
IOptions<EventPublishOptions> options,
|
||||
ILogger<OrchestratorEventPublisher> logger,
|
||||
IEventSigner? eventSigner = null)
|
||||
{
|
||||
_idempotencyStore = idempotencyStore;
|
||||
_notifierBus = notifierBus;
|
||||
_eventSigner = eventSigner;
|
||||
_options = options.Value;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
public async Task<bool> PublishAsync(EventEnvelope envelope, CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Check idempotency
|
||||
if (!await _idempotencyStore.TryMarkAsync(envelope.IdempotencyKey, _options.IdempotencyTtl, cancellationToken))
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Event {EventId} deduplicated by idempotency key {IdempotencyKey}",
|
||||
envelope.EventId, envelope.IdempotencyKey);
|
||||
OrchestratorMetrics.EventDeduplicated(envelope.TenantId, envelope.EventType.ToEventTypeName());
|
||||
return false;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var message = await PrepareMessageAsync(envelope, cancellationToken);
|
||||
var channel = GetChannel(envelope);
|
||||
|
||||
await PublishWithRetryAsync(channel, message, cancellationToken);
|
||||
|
||||
OrchestratorMetrics.EventPublished(envelope.TenantId, envelope.EventType.ToEventTypeName());
|
||||
|
||||
_logger.LogInformation(
|
||||
"Published event {EventId} type {EventType} to channel {Channel}",
|
||||
envelope.EventId, envelope.EventType, channel);
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Remove idempotency key on failure to allow retry
|
||||
await _idempotencyStore.RemoveAsync(envelope.IdempotencyKey, cancellationToken);
|
||||
OrchestratorMetrics.EventPublishFailed(envelope.TenantId, envelope.EventType.ToEventTypeName());
|
||||
|
||||
_logger.LogError(ex,
|
||||
"Failed to publish event {EventId} type {EventType}",
|
||||
envelope.EventId, envelope.EventType);
|
||||
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<BatchPublishResult> PublishBatchAsync(
|
||||
IEnumerable<EventEnvelope> envelopes,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var published = 0;
|
||||
var deduplicated = 0;
|
||||
var failed = 0;
|
||||
var errors = new List<string>();
|
||||
|
||||
foreach (var envelope in envelopes)
|
||||
{
|
||||
try
|
||||
{
|
||||
var result = await PublishAsync(envelope, cancellationToken);
|
||||
if (result)
|
||||
published++;
|
||||
else
|
||||
deduplicated++;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
failed++;
|
||||
errors.Add($"{envelope.EventId}: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return new BatchPublishResult(published, deduplicated, failed, errors);
|
||||
}
|
||||
|
||||
public async Task<bool> IsPublishedAsync(string idempotencyKey, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return await _idempotencyStore.ExistsAsync(idempotencyKey, cancellationToken);
|
||||
}
|
||||
|
||||
private async Task<string> PrepareMessageAsync(EventEnvelope envelope, CancellationToken cancellationToken)
|
||||
{
|
||||
if (_options.SignWithDsse && _eventSigner is not null)
|
||||
{
|
||||
return await _eventSigner.SignAsync(envelope, cancellationToken);
|
||||
}
|
||||
|
||||
return envelope.ToJson();
|
||||
}
|
||||
|
||||
private static string GetChannel(EventEnvelope envelope)
|
||||
{
|
||||
return envelope.Notifier?.Channel ?? envelope.EventType switch
|
||||
{
|
||||
OrchestratorEventType.ExportCreated or
|
||||
OrchestratorEventType.ExportStarted or
|
||||
OrchestratorEventType.ExportCompleted or
|
||||
OrchestratorEventType.ExportFailed or
|
||||
OrchestratorEventType.ExportCanceled or
|
||||
OrchestratorEventType.ExportArchived or
|
||||
OrchestratorEventType.ExportExpired or
|
||||
OrchestratorEventType.ExportDeleted => "orch.exports",
|
||||
|
||||
OrchestratorEventType.PolicyUpdated or
|
||||
OrchestratorEventType.PolicySimulated or
|
||||
OrchestratorEventType.PolicyApplied => "orch.policy",
|
||||
|
||||
OrchestratorEventType.ScheduleCreated or
|
||||
OrchestratorEventType.ScheduleEnabled or
|
||||
OrchestratorEventType.ScheduleDisabled or
|
||||
OrchestratorEventType.ScheduleTriggered or
|
||||
OrchestratorEventType.ScheduleSkipped => "orch.schedules",
|
||||
|
||||
OrchestratorEventType.AlertCreated or
|
||||
OrchestratorEventType.AlertAcknowledged or
|
||||
OrchestratorEventType.AlertResolved => "orch.alerts",
|
||||
|
||||
OrchestratorEventType.PackRunCreated or
|
||||
OrchestratorEventType.PackRunStarted or
|
||||
OrchestratorEventType.PackRunLog or
|
||||
OrchestratorEventType.PackRunArtifact or
|
||||
OrchestratorEventType.PackRunCompleted or
|
||||
OrchestratorEventType.PackRunFailed => "orch.pack_runs",
|
||||
|
||||
_ => "orch.jobs"
|
||||
};
|
||||
}
|
||||
|
||||
private async Task PublishWithRetryAsync(string channel, string message, CancellationToken cancellationToken)
|
||||
{
|
||||
var attempt = 0;
|
||||
var delay = _options.RetryDelay;
|
||||
|
||||
while (true)
|
||||
{
|
||||
try
|
||||
{
|
||||
await _notifierBus.SendAsync(channel, message, cancellationToken);
|
||||
return;
|
||||
}
|
||||
catch (Exception ex) when (attempt < _options.MaxRetries && IsTransient(ex))
|
||||
{
|
||||
attempt++;
|
||||
_logger.LogWarning(ex,
|
||||
"Transient failure publishing to {Channel}, attempt {Attempt}/{MaxRetries}",
|
||||
channel, attempt, _options.MaxRetries);
|
||||
|
||||
await Task.Delay(delay, cancellationToken);
|
||||
delay = TimeSpan.FromMilliseconds(delay.TotalMilliseconds * 2); // Exponential backoff
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static bool IsTransient(Exception ex)
|
||||
{
|
||||
return ex is TimeoutException or
|
||||
TaskCanceledException or
|
||||
System.Net.Http.HttpRequestException or
|
||||
System.IO.IOException;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Null implementation of notifier bus for testing.
|
||||
/// </summary>
|
||||
public sealed class NullNotifierBus : INotifierBus
|
||||
{
|
||||
/// <summary>Singleton instance.</summary>
|
||||
public static NullNotifierBus Instance { get; } = new();
|
||||
|
||||
private readonly List<(string Channel, string Message)> _messages = new();
|
||||
private readonly object _lock = new();
|
||||
|
||||
public Task SendAsync(string channel, string message, CancellationToken cancellationToken = default)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
_messages.Add((channel, message));
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task SendBatchAsync(string channel, IEnumerable<string> messages, CancellationToken cancellationToken = default)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
foreach (var message in messages)
|
||||
{
|
||||
_messages.Add((channel, message));
|
||||
}
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
/// <summary>Gets all sent messages (for testing).</summary>
|
||||
public IReadOnlyList<(string Channel, string Message)> GetMessages()
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
return _messages.ToList();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>Gets messages for a specific channel (for testing).</summary>
|
||||
public IReadOnlyList<string> GetMessages(string channel)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
return _messages.Where(m => m.Channel == channel).Select(m => m.Message).ToList();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>Clears all messages (for testing).</summary>
|
||||
public void Clear()
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
_messages.Clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Null implementation of event signer for testing.
|
||||
/// </summary>
|
||||
public sealed class NullEventSigner : IEventSigner
|
||||
{
|
||||
/// <summary>Singleton instance.</summary>
|
||||
public static NullEventSigner Instance { get; } = new();
|
||||
|
||||
private NullEventSigner() { }
|
||||
|
||||
public Task<string> SignAsync(EventEnvelope envelope, CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Return envelope JSON wrapped in mock DSSE structure
|
||||
var payload = envelope.ToJson();
|
||||
var dsse = $"{{\"payloadType\":\"application/vnd.orch.event+json\",\"payload\":\"{Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(payload))}\",\"signatures\":[]}}";
|
||||
return Task.FromResult(dsse);
|
||||
}
|
||||
|
||||
public Task<EventEnvelope?> VerifyAsync(string signedPayload, CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Extract and parse for testing
|
||||
try
|
||||
{
|
||||
var doc = System.Text.Json.JsonDocument.Parse(signedPayload);
|
||||
if (doc.RootElement.TryGetProperty("payload", out var payloadElement))
|
||||
{
|
||||
var payloadBytes = Convert.FromBase64String(payloadElement.GetString()!);
|
||||
var json = System.Text.Encoding.UTF8.GetString(payloadBytes);
|
||||
return Task.FromResult(EventEnvelope.FromJson(json));
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore parse errors
|
||||
}
|
||||
return Task.FromResult<EventEnvelope?>(null);
|
||||
}
|
||||
}
|
||||
@@ -721,4 +721,637 @@ public static class OrchestratorMetrics
|
||||
|
||||
public static void ScaleDownSignal(string reason)
|
||||
=> ScaleDownSignals.Add(1, new KeyValuePair<string, object?>("reason", reason));
|
||||
|
||||
// Export job metrics
|
||||
private static readonly Counter<long> ExportJobsCreated = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.jobs_created",
|
||||
description: "Total export jobs created");
|
||||
|
||||
private static readonly Counter<long> ExportJobsCompleted = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.jobs_completed",
|
||||
description: "Total export jobs completed successfully");
|
||||
|
||||
private static readonly Counter<long> ExportJobsFailed = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.jobs_failed",
|
||||
description: "Total export jobs that failed");
|
||||
|
||||
private static readonly Counter<long> ExportJobsCanceled = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.jobs_canceled",
|
||||
description: "Total export jobs canceled");
|
||||
|
||||
private static readonly Counter<long> ExportHeartbeats = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.heartbeats",
|
||||
description: "Total export worker heartbeats");
|
||||
|
||||
private static readonly Histogram<double> ExportDuration = Meter.CreateHistogram<double>(
|
||||
"orchestrator.export.duration.seconds",
|
||||
unit: "s",
|
||||
description: "Export job duration");
|
||||
|
||||
private static readonly Histogram<long> ExportSize = Meter.CreateHistogram<long>(
|
||||
"orchestrator.export.size.bytes",
|
||||
unit: "bytes",
|
||||
description: "Export output size");
|
||||
|
||||
private static readonly Histogram<long> ExportEntryCount = Meter.CreateHistogram<long>(
|
||||
"orchestrator.export.entry_count",
|
||||
unit: "entries",
|
||||
description: "Number of entries exported");
|
||||
|
||||
private static readonly UpDownCounter<long> ExportJobsActive = Meter.CreateUpDownCounter<long>(
|
||||
"orchestrator.export.jobs_active",
|
||||
description: "Currently active export jobs");
|
||||
|
||||
private static readonly Counter<long> ExportQuotaDenials = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.quota_denials",
|
||||
description: "Export jobs denied due to quota");
|
||||
|
||||
public static void ExportJobCreated(string tenantId, string exportType, string format)
|
||||
=> ExportJobsCreated.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("format", format));
|
||||
|
||||
public static void ExportJobCompleted(string tenantId, string exportType, string format)
|
||||
=> ExportJobsCompleted.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("format", format));
|
||||
|
||||
public static void ExportJobFailed(string tenantId, string exportType, string reason)
|
||||
=> ExportJobsFailed.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("reason", reason));
|
||||
|
||||
public static void ExportJobCanceled(string tenantId, string exportType)
|
||||
=> ExportJobsCanceled.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
public static void ExportHeartbeat(string tenantId, string exportType)
|
||||
=> ExportHeartbeats.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
public static void RecordExportDuration(string tenantId, string exportType, string format, double durationSeconds)
|
||||
=> ExportDuration.Record(durationSeconds,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("format", format));
|
||||
|
||||
public static void RecordExportSize(string tenantId, string exportType, string format, long sizeBytes)
|
||||
=> ExportSize.Record(sizeBytes,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("format", format));
|
||||
|
||||
public static void RecordExportEntryCount(string tenantId, string exportType, long entryCount)
|
||||
=> ExportEntryCount.Record(entryCount,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
public static void ExportJobStarted(string tenantId, string exportType)
|
||||
=> ExportJobsActive.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
public static void ExportJobFinished(string tenantId, string exportType)
|
||||
=> ExportJobsActive.Add(-1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
public static void ExportQuotaDenied(string tenantId, string exportType, string reason)
|
||||
=> ExportQuotaDenials.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("reason", reason));
|
||||
|
||||
// Export distribution metrics
|
||||
private static readonly Counter<long> ExportDistributionsCreated = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.distributions_created",
|
||||
description: "Total export distributions created");
|
||||
|
||||
private static readonly Counter<long> ExportReplicationsStarted = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.replications_started",
|
||||
description: "Export replication operations started");
|
||||
|
||||
private static readonly Counter<long> ExportReplicationsCompleted = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.replications_completed",
|
||||
description: "Export replication operations completed");
|
||||
|
||||
private static readonly Counter<long> ExportReplicationsFailed = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.replications_failed",
|
||||
description: "Export replication operations failed");
|
||||
|
||||
private static readonly Counter<long> ExportDownloadsGenerated = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.downloads_generated",
|
||||
description: "Pre-signed download URLs generated");
|
||||
|
||||
public static void ExportDistributionCreated(string tenantId, string exportType, string storageProvider)
|
||||
=> ExportDistributionsCreated.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("storage_provider", storageProvider));
|
||||
|
||||
public static void ExportReplicationStarted(string tenantId, string exportType, string target)
|
||||
=> ExportReplicationsStarted.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("target", target));
|
||||
|
||||
public static void ExportReplicationCompleted(string tenantId, string exportType, string target)
|
||||
=> ExportReplicationsCompleted.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("target", target));
|
||||
|
||||
public static void ExportReplicationFailed(string tenantId, string exportType, string target, string error)
|
||||
=> ExportReplicationsFailed.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("target", target),
|
||||
new KeyValuePair<string, object?>("error", error));
|
||||
|
||||
public static void ExportDownloadGenerated(string tenantId, string exportType)
|
||||
=> ExportDownloadsGenerated.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
// Export retention metrics
|
||||
private static readonly Counter<long> ExportRetentionsApplied = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.retentions_applied",
|
||||
description: "Retention policies applied to exports");
|
||||
|
||||
private static readonly Counter<long> ExportRetentionsExtended = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.retentions_extended",
|
||||
description: "Export retention periods extended");
|
||||
|
||||
private static readonly Counter<long> ExportLegalHoldsPlaced = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.legal_holds_placed",
|
||||
description: "Legal holds placed on exports");
|
||||
|
||||
private static readonly Counter<long> ExportLegalHoldsReleased = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.legal_holds_released",
|
||||
description: "Legal holds released on exports");
|
||||
|
||||
private static readonly Counter<long> ExportsArchived = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.archived",
|
||||
description: "Exports moved to archive tier");
|
||||
|
||||
private static readonly Counter<long> ExportsExpired = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.expired",
|
||||
description: "Exports that have expired");
|
||||
|
||||
private static readonly Counter<long> ExportsDeleted = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.deleted",
|
||||
description: "Exports deleted after retention");
|
||||
|
||||
private static readonly UpDownCounter<long> ExportsWithLegalHold = Meter.CreateUpDownCounter<long>(
|
||||
"orchestrator.export.with_legal_hold",
|
||||
description: "Current exports under legal hold");
|
||||
|
||||
public static void ExportRetentionApplied(string tenantId, string exportType, string policyName)
|
||||
=> ExportRetentionsApplied.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("policy_name", policyName));
|
||||
|
||||
public static void ExportRetentionExtended(string tenantId, string exportType, int extensionCount)
|
||||
=> ExportRetentionsExtended.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("extension_count", extensionCount));
|
||||
|
||||
public static void ExportLegalHoldPlaced(string tenantId, string exportType)
|
||||
{
|
||||
ExportLegalHoldsPlaced.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
ExportsWithLegalHold.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
}
|
||||
|
||||
public static void ExportLegalHoldReleased(string tenantId, string exportType)
|
||||
{
|
||||
ExportLegalHoldsReleased.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
ExportsWithLegalHold.Add(-1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
}
|
||||
|
||||
public static void ExportArchived(string tenantId, string exportType)
|
||||
=> ExportsArchived.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
public static void ExportExpired(string tenantId, string exportType)
|
||||
=> ExportsExpired.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
public static void ExportDeleted(string tenantId, string exportType)
|
||||
=> ExportsDeleted.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
// Export Scheduling Metrics
|
||||
private static readonly Counter<long> ScheduledExportsCreated = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.schedules_created",
|
||||
description: "Export schedules created");
|
||||
|
||||
private static readonly Counter<long> ScheduledExportsEnabled = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.schedules_enabled",
|
||||
description: "Export schedules enabled");
|
||||
|
||||
private static readonly Counter<long> ScheduledExportsDisabled = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.schedules_disabled",
|
||||
description: "Export schedules disabled");
|
||||
|
||||
private static readonly Counter<long> ScheduledExportsTriggered = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.schedules_triggered",
|
||||
description: "Scheduled export runs triggered");
|
||||
|
||||
private static readonly Counter<long> ScheduledExportsSkipped = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.schedules_skipped",
|
||||
description: "Scheduled export runs skipped (already running)");
|
||||
|
||||
private static readonly Counter<long> ScheduledExportsSucceeded = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.schedules_succeeded",
|
||||
description: "Scheduled export runs succeeded");
|
||||
|
||||
private static readonly Counter<long> ScheduledExportsFailed = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.schedules_failed",
|
||||
description: "Scheduled export runs failed");
|
||||
|
||||
private static readonly UpDownCounter<long> ActiveSchedules = Meter.CreateUpDownCounter<long>(
|
||||
"orchestrator.export.active_schedules",
|
||||
description: "Currently active export schedules");
|
||||
|
||||
public static void ExportScheduleCreated(string tenantId, string exportType)
|
||||
{
|
||||
ScheduledExportsCreated.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
ActiveSchedules.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
}
|
||||
|
||||
public static void ExportScheduleEnabled(string tenantId, string exportType)
|
||||
{
|
||||
ScheduledExportsEnabled.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
ActiveSchedules.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
}
|
||||
|
||||
public static void ExportScheduleDisabled(string tenantId, string exportType)
|
||||
{
|
||||
ScheduledExportsDisabled.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
ActiveSchedules.Add(-1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
}
|
||||
|
||||
public static void ExportScheduleTriggered(string tenantId, string exportType, string scheduleName)
|
||||
=> ScheduledExportsTriggered.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("schedule_name", scheduleName));
|
||||
|
||||
public static void ExportScheduleSkipped(string tenantId, string exportType, string scheduleName)
|
||||
=> ScheduledExportsSkipped.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("schedule_name", scheduleName));
|
||||
|
||||
public static void ExportScheduleSucceeded(string tenantId, string exportType, string scheduleName)
|
||||
=> ScheduledExportsSucceeded.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("schedule_name", scheduleName));
|
||||
|
||||
public static void ExportScheduleFailed(string tenantId, string exportType, string scheduleName)
|
||||
=> ScheduledExportsFailed.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("schedule_name", scheduleName));
|
||||
|
||||
// Retention Pruning Metrics
|
||||
private static readonly Counter<long> RetentionPruneRuns = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.prune_runs",
|
||||
description: "Retention prune runs executed");
|
||||
|
||||
private static readonly Counter<long> RetentionPruneArchived = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.prune_archived",
|
||||
description: "Exports archived during pruning");
|
||||
|
||||
private static readonly Counter<long> RetentionPruneDeleted = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.prune_deleted",
|
||||
description: "Exports deleted during pruning");
|
||||
|
||||
private static readonly Counter<long> RetentionPruneSkipped = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.prune_skipped",
|
||||
description: "Exports skipped during pruning (legal hold, etc.)");
|
||||
|
||||
private static readonly Counter<long> RetentionPruneErrors = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.prune_errors",
|
||||
description: "Errors during retention pruning");
|
||||
|
||||
private static readonly Histogram<double> RetentionPruneDuration = Meter.CreateHistogram<double>(
|
||||
"orchestrator.export.prune_duration.seconds",
|
||||
unit: "s",
|
||||
description: "Duration of prune operations");
|
||||
|
||||
public static void ExportPruneRun(string? tenantId, string? exportType)
|
||||
=> RetentionPruneRuns.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId ?? "global"),
|
||||
new KeyValuePair<string, object?>("export_type", exportType ?? "all"));
|
||||
|
||||
public static void ExportPruneArchived(string? tenantId, string? exportType, int count)
|
||||
=> RetentionPruneArchived.Add(count,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId ?? "global"),
|
||||
new KeyValuePair<string, object?>("export_type", exportType ?? "all"));
|
||||
|
||||
public static void ExportPruneDeleted(string? tenantId, string? exportType, int count)
|
||||
=> RetentionPruneDeleted.Add(count,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId ?? "global"),
|
||||
new KeyValuePair<string, object?>("export_type", exportType ?? "all"));
|
||||
|
||||
public static void ExportPruneSkipped(string? tenantId, string? exportType, int count)
|
||||
=> RetentionPruneSkipped.Add(count,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId ?? "global"),
|
||||
new KeyValuePair<string, object?>("export_type", exportType ?? "all"));
|
||||
|
||||
public static void ExportPruneError(string? tenantId, string? exportType)
|
||||
=> RetentionPruneErrors.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId ?? "global"),
|
||||
new KeyValuePair<string, object?>("export_type", exportType ?? "all"));
|
||||
|
||||
public static void ExportPruneDuration(string? tenantId, string? exportType, double seconds)
|
||||
=> RetentionPruneDuration.Record(seconds,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId ?? "global"),
|
||||
new KeyValuePair<string, object?>("export_type", exportType ?? "all"));
|
||||
|
||||
// Export Alerting Metrics
|
||||
private static readonly Counter<long> ExportAlertsCreated = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.alerts_created",
|
||||
description: "Export alerts created");
|
||||
|
||||
private static readonly Counter<long> ExportAlertsAcknowledged = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.alerts_acknowledged",
|
||||
description: "Export alerts acknowledged");
|
||||
|
||||
private static readonly Counter<long> ExportAlertsResolved = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.alerts_resolved",
|
||||
description: "Export alerts resolved");
|
||||
|
||||
private static readonly Counter<long> ExportAlertsSuppressed = Meter.CreateCounter<long>(
|
||||
"orchestrator.export.alerts_suppressed",
|
||||
description: "Export alerts suppressed by cooldown");
|
||||
|
||||
private static readonly UpDownCounter<long> ActiveExportAlerts = Meter.CreateUpDownCounter<long>(
|
||||
"orchestrator.export.active_alerts",
|
||||
description: "Currently active export alerts");
|
||||
|
||||
public static void ExportAlertCreated(string tenantId, string exportType, string severity)
|
||||
{
|
||||
ExportAlertsCreated.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("severity", severity));
|
||||
ActiveExportAlerts.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("severity", severity));
|
||||
}
|
||||
|
||||
public static void ExportAlertAcknowledged(string tenantId, string exportType, string severity)
|
||||
=> ExportAlertsAcknowledged.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("severity", severity));
|
||||
|
||||
public static void ExportAlertResolved(string tenantId, string exportType, string severity)
|
||||
{
|
||||
ExportAlertsResolved.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("severity", severity));
|
||||
ActiveExportAlerts.Add(-1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType),
|
||||
new KeyValuePair<string, object?>("severity", severity));
|
||||
}
|
||||
|
||||
public static void ExportAlertSuppressed(string tenantId, string exportType)
|
||||
=> ExportAlertsSuppressed.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("export_type", exportType));
|
||||
|
||||
// Event Publishing Metrics
|
||||
private static readonly Counter<long> EventsPublished = Meter.CreateCounter<long>(
|
||||
"orchestrator.events.published",
|
||||
description: "Total events published to notifier bus");
|
||||
|
||||
private static readonly Counter<long> EventsDeduplicated = Meter.CreateCounter<long>(
|
||||
"orchestrator.events.deduplicated",
|
||||
description: "Total events deduplicated by idempotency key");
|
||||
|
||||
private static readonly Counter<long> EventsPublishFailed = Meter.CreateCounter<long>(
|
||||
"orchestrator.events.publish_failed",
|
||||
description: "Total events that failed to publish");
|
||||
|
||||
private static readonly Counter<long> EventsSignedCounter = Meter.CreateCounter<long>(
|
||||
"orchestrator.events.signed",
|
||||
description: "Total events signed with DSSE");
|
||||
|
||||
private static readonly Histogram<double> EventPublishLatency = Meter.CreateHistogram<double>(
|
||||
"orchestrator.events.publish_latency.ms",
|
||||
unit: "ms",
|
||||
description: "Event publish latency in milliseconds");
|
||||
|
||||
private static readonly Counter<long> EventRetryAttempts = Meter.CreateCounter<long>(
|
||||
"orchestrator.events.retry_attempts",
|
||||
description: "Total event publish retry attempts");
|
||||
|
||||
public static void EventPublished(string tenantId, string eventType)
|
||||
=> EventsPublished.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("event_type", eventType));
|
||||
|
||||
public static void EventDeduplicated(string tenantId, string eventType)
|
||||
=> EventsDeduplicated.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("event_type", eventType));
|
||||
|
||||
public static void EventPublishFailed(string tenantId, string eventType)
|
||||
=> EventsPublishFailed.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("event_type", eventType));
|
||||
|
||||
public static void EventSigned(string tenantId, string eventType)
|
||||
=> EventsSignedCounter.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("event_type", eventType));
|
||||
|
||||
public static void RecordEventPublishLatency(string tenantId, string eventType, double latencyMs)
|
||||
=> EventPublishLatency.Record(latencyMs,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("event_type", eventType));
|
||||
|
||||
public static void EventRetryAttempt(string tenantId, string eventType, int attempt)
|
||||
=> EventRetryAttempts.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("event_type", eventType),
|
||||
new KeyValuePair<string, object?>("attempt", attempt));
|
||||
|
||||
// Pack Run Metrics
|
||||
private static readonly Counter<long> PackRunsCreated = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.created",
|
||||
description: "Total pack runs created");
|
||||
|
||||
private static readonly Counter<long> PackRunsScheduled = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.scheduled",
|
||||
description: "Total pack runs scheduled");
|
||||
|
||||
private static readonly Counter<long> PackRunsLeased = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.leased",
|
||||
description: "Total pack runs leased to task runners");
|
||||
|
||||
private static readonly Counter<long> PackRunsStarted = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.started",
|
||||
description: "Total pack runs started executing");
|
||||
|
||||
private static readonly Counter<long> PackRunsCompleted = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.completed",
|
||||
description: "Total pack runs completed");
|
||||
|
||||
private static readonly Counter<long> PackRunsFailed = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.failed",
|
||||
description: "Total pack runs failed");
|
||||
|
||||
private static readonly Counter<long> PackRunsCanceled = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.canceled",
|
||||
description: "Total pack runs canceled");
|
||||
|
||||
private static readonly Counter<long> PackRunsTimedOut = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.timed_out",
|
||||
description: "Total pack runs that timed out");
|
||||
|
||||
private static readonly Counter<long> PackRunHeartbeats = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.heartbeats",
|
||||
description: "Total pack run heartbeats received");
|
||||
|
||||
private static readonly Counter<long> PackRunLogsAppended = Meter.CreateCounter<long>(
|
||||
"orchestrator.pack_runs.logs_appended",
|
||||
description: "Total pack run log entries appended");
|
||||
|
||||
private static readonly Histogram<double> PackRunDuration = Meter.CreateHistogram<double>(
|
||||
"orchestrator.pack_run.duration.seconds",
|
||||
unit: "s",
|
||||
description: "Pack run execution duration");
|
||||
|
||||
private static readonly UpDownCounter<long> PackRunsActive = Meter.CreateUpDownCounter<long>(
|
||||
"orchestrator.pack_runs.active",
|
||||
description: "Currently active pack runs");
|
||||
|
||||
private static readonly Histogram<long> PackRunLogCount = Meter.CreateHistogram<long>(
|
||||
"orchestrator.pack_run.log_count",
|
||||
unit: "entries",
|
||||
description: "Number of log entries per pack run");
|
||||
|
||||
public static void PackRunCreated(string tenantId, string packId)
|
||||
{
|
||||
PackRunsCreated.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
PackRunsActive.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
}
|
||||
|
||||
public static void PackRunScheduled(string tenantId, string packId)
|
||||
=> PackRunsScheduled.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
|
||||
public static void PackRunLeased(string tenantId, string packId)
|
||||
=> PackRunsLeased.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
|
||||
public static void PackRunStarted(string tenantId, string packId)
|
||||
=> PackRunsStarted.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
|
||||
public static void PackRunCompleted(string tenantId, string packId, string status)
|
||||
{
|
||||
PackRunsCompleted.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId),
|
||||
new KeyValuePair<string, object?>("status", status));
|
||||
PackRunsActive.Add(-1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
}
|
||||
|
||||
public static void PackRunFailed(string tenantId, string packId)
|
||||
{
|
||||
PackRunsFailed.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
PackRunsActive.Add(-1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
}
|
||||
|
||||
public static void PackRunCanceled(string tenantId, string packId)
|
||||
{
|
||||
PackRunsCanceled.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
PackRunsActive.Add(-1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
}
|
||||
|
||||
public static void PackRunTimedOut(string tenantId, string packId)
|
||||
{
|
||||
PackRunsTimedOut.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
PackRunsActive.Add(-1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
}
|
||||
|
||||
public static void PackRunHeartbeatReceived(string tenantId, string packId)
|
||||
=> PackRunHeartbeats.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
|
||||
public static void PackRunLogAppended(string tenantId, string packId, int count)
|
||||
=> PackRunLogsAppended.Add(count,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
|
||||
public static void RecordPackRunDuration(string tenantId, string packId, double durationSeconds)
|
||||
=> PackRunDuration.Record(durationSeconds,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
|
||||
public static void RecordPackRunLogCount(string tenantId, string packId, long logCount)
|
||||
=> PackRunLogCount.Record(logCount,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("pack_id", packId));
|
||||
}
|
||||
|
||||
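For orientation (not part of the commit): the helpers above are meant to be called in matched pairs, since PackRunCreated increments the pack_runs.active up/down counter and exactly one terminal helper (PackRunCompleted, PackRunFailed, PackRunCanceled, or PackRunTimedOut) decrements it. A minimal calling sketch follows, assuming the containing static class is named OrchestratorMetrics (its declaration sits above this excerpt) and that "succeeded" is an accepted status label:

    // Illustrative sketch only; the class name, status label, and runBody delegate are assumptions.
    static async Task RunPackWithMetricsAsync(string tenantId, string packId, Func<Task> runBody)
    {
        var startedAt = DateTimeOffset.UtcNow;
        OrchestratorMetrics.PackRunCreated(tenantId, packId);    // +1 pack_runs.active
        OrchestratorMetrics.PackRunStarted(tenantId, packId);
        try
        {
            await runBody();
            OrchestratorMetrics.PackRunCompleted(tenantId, packId, "succeeded"); // -1 active
        }
        catch (OperationCanceledException)
        {
            OrchestratorMetrics.PackRunCanceled(tenantId, packId);               // -1 active
            throw;
        }
        catch
        {
            OrchestratorMetrics.PackRunFailed(tenantId, packId);                 // -1 active
            throw;
        }
        finally
        {
            OrchestratorMetrics.RecordPackRunDuration(
                tenantId, packId, (DateTimeOffset.UtcNow - startedAt).TotalSeconds);
        }
    }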
@@ -0,0 +1,185 @@
using StellaOps.Orchestrator.Core.Domain;

namespace StellaOps.Orchestrator.Infrastructure.Repositories;

/// <summary>
/// Repository interface for pack run persistence operations.
/// </summary>
public interface IPackRunRepository
{
    /// <summary>
    /// Gets a pack run by ID.
    /// </summary>
    Task<PackRun?> GetByIdAsync(string tenantId, Guid packRunId, CancellationToken cancellationToken);

    /// <summary>
    /// Gets a pack run by idempotency key.
    /// </summary>
    Task<PackRun?> GetByIdempotencyKeyAsync(string tenantId, string idempotencyKey, CancellationToken cancellationToken);

    /// <summary>
    /// Creates a new pack run.
    /// </summary>
    Task CreateAsync(PackRun packRun, CancellationToken cancellationToken);

    /// <summary>
    /// Updates pack run status and related fields.
    /// </summary>
    Task UpdateStatusAsync(
        string tenantId,
        Guid packRunId,
        PackRunStatus status,
        int attempt,
        Guid? leaseId,
        string? taskRunnerId,
        DateTimeOffset? leaseUntil,
        DateTimeOffset? scheduledAt,
        DateTimeOffset? leasedAt,
        DateTimeOffset? startedAt,
        DateTimeOffset? completedAt,
        DateTimeOffset? notBefore,
        string? reason,
        int? exitCode,
        long? durationMs,
        CancellationToken cancellationToken);

    /// <summary>
    /// Leases the next available pack run for execution.
    /// Returns null if no pack runs are available.
    /// </summary>
    Task<PackRun?> LeaseNextAsync(
        string tenantId,
        string? packId,
        Guid leaseId,
        string taskRunnerId,
        DateTimeOffset leaseUntil,
        CancellationToken cancellationToken);

    /// <summary>
    /// Extends an existing lease.
    /// Returns false if the lease has expired or doesn't match.
    /// </summary>
    Task<bool> ExtendLeaseAsync(
        string tenantId,
        Guid packRunId,
        Guid leaseId,
        DateTimeOffset newLeaseUntil,
        CancellationToken cancellationToken);

    /// <summary>
    /// Releases a lease (on failure or timeout).
    /// </summary>
    Task ReleaseLeaseAsync(
        string tenantId,
        Guid packRunId,
        Guid leaseId,
        PackRunStatus newStatus,
        string? reason,
        CancellationToken cancellationToken);

    /// <summary>
    /// Lists pack runs with pagination and filters.
    /// </summary>
    Task<IReadOnlyList<PackRun>> ListAsync(
        string tenantId,
        string? packId,
        PackRunStatus? status,
        string? projectId,
        DateTimeOffset? createdAfter,
        DateTimeOffset? createdBefore,
        int limit,
        int offset,
        CancellationToken cancellationToken);

    /// <summary>
    /// Counts pack runs matching the filters.
    /// </summary>
    Task<int> CountAsync(
        string tenantId,
        string? packId,
        PackRunStatus? status,
        string? projectId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets pack runs with expired leases (for timeout handling).
    /// </summary>
    Task<IReadOnlyList<PackRun>> GetExpiredLeasesAsync(
        DateTimeOffset cutoff,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Cancels pending pack runs matching the filters.
    /// Returns the count of canceled pack runs.
    /// </summary>
    Task<int> CancelPendingAsync(
        string tenantId,
        string? packId,
        string reason,
        CancellationToken cancellationToken);
}

/// <summary>
/// Repository interface for pack run log persistence operations.
/// </summary>
public interface IPackRunLogRepository
{
    /// <summary>
    /// Appends a single log entry.
    /// </summary>
    Task AppendAsync(PackRunLog log, CancellationToken cancellationToken);

    /// <summary>
    /// Appends a batch of log entries.
    /// </summary>
    Task AppendBatchAsync(IReadOnlyList<PackRunLog> logs, CancellationToken cancellationToken);

    /// <summary>
    /// Gets log entries for a pack run with cursor-based pagination.
    /// </summary>
    Task<PackRunLogBatch> GetLogsAsync(
        string tenantId,
        Guid packRunId,
        long afterSequence,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets the current log count and latest sequence for a pack run.
    /// </summary>
    Task<(long Count, long LatestSequence)> GetLogStatsAsync(
        string tenantId,
        Guid packRunId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets log entries matching a level filter.
    /// </summary>
    Task<PackRunLogBatch> GetLogsByLevelAsync(
        string tenantId,
        Guid packRunId,
        LogLevel minLevel,
        long afterSequence,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Searches log messages for a pattern (simple substring match).
    /// </summary>
    Task<PackRunLogBatch> SearchLogsAsync(
        string tenantId,
        Guid packRunId,
        string pattern,
        long afterSequence,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Deletes all logs for a pack run (for cleanup/retention).
    /// </summary>
    Task<long> DeleteLogsAsync(
        string tenantId,
        Guid packRunId,
        CancellationToken cancellationToken);
}
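The lease methods above define a claim/extend/release protocol for task runners. The sketch below shows one iteration of a consumer loop under stated assumptions: the PackRun property PackRunId and the PackRunStatus.Failed member are guesses, since those domain types are not part of this diff, and the actual pack execution is left as a placeholder comment.

    // Illustrative sketch only, not part of this commit; PackRun/PackRunStatus member names are assumptions.
    static async Task<bool> TryRunOnePackAsync(
        IPackRunRepository repository,
        string tenantId,
        string taskRunnerId,
        CancellationToken ct)
    {
        var leaseId = Guid.NewGuid();
        var run = await repository.LeaseNextAsync(
            tenantId, packId: null, leaseId, taskRunnerId,
            leaseUntil: DateTimeOffset.UtcNow.AddMinutes(5), ct);
        if (run is null)
        {
            return false; // nothing pending for this tenant
        }

        try
        {
            // Keep the lease alive while the pack executes; stop if another runner took it over.
            var stillHeld = await repository.ExtendLeaseAsync(
                tenantId, run.PackRunId, leaseId, DateTimeOffset.UtcNow.AddMinutes(5), ct);
            if (!stillHeld)
            {
                return false;
            }

            // Execute the pack here and append its output through IPackRunLogRepository.
            return true;
        }
        catch (Exception ex)
        {
            // Hand the run back so the expired-lease sweep or a retry can pick it up.
            await repository.ReleaseLeaseAsync(
                tenantId, run.PackRunId, leaseId, PackRunStatus.Failed, ex.Message, ct);
            throw;
        }
    }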
@@ -0,0 +1,911 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Orchestrator.Core.Domain.Events;
using StellaOps.Orchestrator.Infrastructure.Events;

namespace StellaOps.Orchestrator.Tests.Events;

/// <summary>
/// Tests for event envelope and publishing infrastructure.
/// </summary>
public class EventPublishingTests
{
    private static readonly CancellationToken CT = CancellationToken.None;

    #region EventEnvelope Tests

    [Fact]
    public void EventEnvelope_Create_GeneratesIdAndTimestamp()
    {
        var actor = EventActor.Service("test-service", "orch:read");

        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.JobCreated,
            tenantId: "tenant-1",
            actor: actor);

        Assert.NotNull(envelope.EventId);
        Assert.StartsWith("urn:orch:event:", envelope.EventId);
        Assert.Equal(EventEnvelope.CurrentSchemaVersion, envelope.SchemaVersion);
        Assert.Equal(OrchestratorEventType.JobCreated, envelope.EventType);
        Assert.Equal("tenant-1", envelope.TenantId);
        Assert.True(envelope.OccurredAt <= DateTimeOffset.UtcNow);
        Assert.NotNull(envelope.IdempotencyKey);
    }

    [Fact]
    public void EventEnvelope_ForJob_IncludesJobMetadata()
    {
        var actor = EventActor.Worker("worker-1", "go-sdk");
        var job = EventJob.Completed("job-123", "pack-run", 1);

        var envelope = EventEnvelope.ForJob(
            eventType: OrchestratorEventType.JobCompleted,
            tenantId: "tenant-1",
            actor: actor,
            job: job,
            correlationId: "corr-456",
            projectId: "proj-789");

        Assert.NotNull(envelope.Job);
        Assert.Equal("job-123", envelope.Job!.Id);
        Assert.Equal("pack-run", envelope.Job.Type);
        Assert.Equal("completed", envelope.Job.Status);
        Assert.Equal(1, envelope.Job.Attempt);
        Assert.Equal("corr-456", envelope.CorrelationId);
        Assert.Equal("proj-789", envelope.ProjectId);
    }

    [Fact]
    public void EventEnvelope_ForExport_CreatesExportEnvelope()
    {
        var actor = EventActor.System("scheduler");
        var exportJob = EventJob.Create("exp-123", "export.sbom", "running", attempt: 1);

        var envelope = EventEnvelope.ForExport(
            eventType: OrchestratorEventType.ExportStarted,
            tenantId: "tenant-1",
            actor: actor,
            exportJob: exportJob);

        Assert.Equal(OrchestratorEventType.ExportStarted, envelope.EventType);
        Assert.NotNull(envelope.Job);
        Assert.Equal("exp-123", envelope.Job!.Id);
    }

    [Fact]
    public void EventEnvelope_ForPolicy_CreatesPolicyEnvelope()
    {
        var actor = EventActor.User("admin@example.com", "policy:write");

        var envelope = EventEnvelope.ForPolicy(
            eventType: OrchestratorEventType.PolicyUpdated,
            tenantId: "tenant-1",
            actor: actor,
            projectId: "proj-1");

        Assert.Equal(OrchestratorEventType.PolicyUpdated, envelope.EventType);
        Assert.Null(envelope.Job);
        Assert.Equal("proj-1", envelope.ProjectId);
    }

    [Fact]
    public void EventEnvelope_ToJson_SerializesCorrectly()
    {
        var actor = EventActor.Service("test-service");
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.AlertCreated,
            tenantId: "tenant-1",
            actor: actor);

        var json = envelope.ToJson();

        Assert.NotNull(json);
        Assert.Contains("schemaVersion", json);
        Assert.Contains("eventId", json);
        Assert.Contains("eventType", json);
        Assert.Contains("tenantId", json);
    }

    [Fact]
    public void EventEnvelope_FromJson_DeserializesCorrectly()
    {
        var actor = EventActor.Service("test-service");
        var original = EventEnvelope.Create(
            eventType: OrchestratorEventType.ScheduleTriggered,
            tenantId: "tenant-1",
            actor: actor,
            projectId: "proj-1");

        var json = original.ToJson();
        var restored = EventEnvelope.FromJson(json);

        Assert.NotNull(restored);
        Assert.Equal(original.EventId, restored!.EventId);
        Assert.Equal(original.EventType, restored.EventType);
        Assert.Equal(original.TenantId, restored.TenantId);
        Assert.Equal(original.ProjectId, restored.ProjectId);
        Assert.Equal(original.IdempotencyKey, restored.IdempotencyKey);
    }

    [Fact]
    public void EventEnvelope_FromJson_ReturnsNullForInvalidJson()
    {
        var result = EventEnvelope.FromJson("not valid json");
        Assert.Null(result);
    }

    [Fact]
    public void EventEnvelope_ComputeDigest_ReturnsSha256Hash()
    {
        var actor = EventActor.Service("test-service");
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.JobCreated,
            tenantId: "tenant-1",
            actor: actor);

        var digest = envelope.ComputeDigest();

        Assert.StartsWith("sha256:", digest);
        Assert.Equal(64 + 7, digest.Length); // "sha256:" + 64 hex chars
    }

    [Fact]
    public void EventEnvelope_GenerateIdempotencyKey_IsDeterministic()
    {
        var key1 = EventEnvelope.GenerateIdempotencyKey(OrchestratorEventType.JobCompleted, "job-123", 2);
        var key2 = EventEnvelope.GenerateIdempotencyKey(OrchestratorEventType.JobCompleted, "job-123", 2);

        Assert.Equal(key1, key2);
        Assert.Equal("orch-job.completed-job-123-2", key1);
    }

    [Fact]
    public void EventEnvelope_GenerateIdempotencyKey_DiffersForDifferentAttempts()
    {
        var key1 = EventEnvelope.GenerateIdempotencyKey(OrchestratorEventType.JobCompleted, "job-123", 1);
        var key2 = EventEnvelope.GenerateIdempotencyKey(OrchestratorEventType.JobCompleted, "job-123", 2);

        Assert.NotEqual(key1, key2);
    }

    #endregion

    #region EventActor Tests

    [Fact]
    public void EventActor_Service_CreatesServiceActor()
    {
        var actor = EventActor.Service("orchestrator", "orch:admin", "orch:write");

        Assert.Equal("service/orchestrator", actor.Subject);
        Assert.NotNull(actor.Scopes);
        Assert.Equal(2, actor.Scopes!.Count);
    }

    [Fact]
    public void EventActor_User_CreatesUserActor()
    {
        var actor = EventActor.User("admin@example.com", "export:create");

        Assert.Equal("user/admin@example.com", actor.Subject);
        Assert.Single(actor.Scopes!);
    }

    [Fact]
    public void EventActor_System_CreatesSystemActor()
    {
        var actor = EventActor.System("scheduler");

        Assert.Equal("system/scheduler", actor.Subject);
        Assert.Null(actor.Scopes);
    }

    [Fact]
    public void EventActor_Worker_CreatesWorkerActor()
    {
        var actor = EventActor.Worker("worker-abc", "python-sdk");

        Assert.Equal("worker/python-sdk/worker-abc", actor.Subject);
        Assert.Null(actor.Scopes);
    }

    #endregion

    #region EventJob Tests

    [Fact]
    public void EventJob_Create_CreatesJobMetadata()
    {
        var job = EventJob.Create(
            id: "job-123",
            type: "ingest",
            status: "running",
            attempt: 2,
            runId: "run-456",
            leaseId: "lease-789");

        Assert.Equal("job-123", job.Id);
        Assert.Equal("ingest", job.Type);
        Assert.Equal("running", job.Status);
        Assert.Equal(2, job.Attempt);
        Assert.Equal("run-456", job.RunId);
        Assert.Equal("lease-789", job.LeaseId);
    }

    [Fact]
    public void EventJob_Completed_SetsCompletedStatus()
    {
        var job = EventJob.Completed("job-123", "export", 1, payloadDigest: "sha256:abc");

        Assert.Equal("completed", job.Status);
        Assert.Equal("sha256:abc", job.PayloadDigest);
        Assert.Null(job.Reason);
    }

    [Fact]
    public void EventJob_Failed_SetsFailedStatusWithReason()
    {
        var job = EventJob.Failed("job-123", "export", 2, "Connection timeout");

        Assert.Equal("failed", job.Status);
        Assert.Equal("Connection timeout", job.Reason);
    }

    [Fact]
    public void EventJob_Canceled_SetsCanceledStatusWithReason()
    {
        var job = EventJob.Canceled("job-123", "export", 1, "User requested cancellation");

        Assert.Equal("canceled", job.Status);
        Assert.Equal("User requested cancellation", job.Reason);
    }

    #endregion

    #region EventMetrics Tests

    [Fact]
    public void EventMetrics_WithDuration_CreatesDurationMetrics()
    {
        var metrics = EventMetrics.WithDuration(45.5);

        Assert.Equal(45.5, metrics.DurationSeconds);
        Assert.Null(metrics.QueueWaitSeconds);
        Assert.Null(metrics.ProcessingSeconds);
    }

    [Fact]
    public void EventMetrics_WithBreakdown_CreatesDetailedMetrics()
    {
        var metrics = EventMetrics.WithBreakdown(total: 100.0, queueWait: 20.0, processing: 80.0);

        Assert.Equal(100.0, metrics.DurationSeconds);
        Assert.Equal(20.0, metrics.QueueWaitSeconds);
        Assert.Equal(80.0, metrics.ProcessingSeconds);
    }

    #endregion

    #region EventNotifier Tests

    [Fact]
    public void EventNotifier_JobsChannel_CreatesJobsNotifier()
    {
        var notifier = EventNotifier.JobsChannel();

        Assert.Equal("orch.jobs", notifier.Channel);
        Assert.Equal("dsse", notifier.Delivery);
        Assert.Null(notifier.Replay);
    }

    [Fact]
    public void EventNotifier_ExportsChannel_CreatesExportsNotifier()
    {
        var notifier = EventNotifier.ExportsChannel("raw");

        Assert.Equal("orch.exports", notifier.Channel);
        Assert.Equal("raw", notifier.Delivery);
    }

    [Fact]
    public void EventNotifier_PolicyChannel_CreatesPolicyNotifier()
    {
        var notifier = EventNotifier.PolicyChannel();

        Assert.Equal("orch.policy", notifier.Channel);
    }

    [Fact]
    public void EventNotifier_WithReplay_AddsReplayMetadata()
    {
        var notifier = EventNotifier.JobsChannel().WithReplay(5, 10);

        Assert.NotNull(notifier.Replay);
        Assert.Equal(5, notifier.Replay!.Ordinal);
        Assert.Equal(10, notifier.Replay.Total);
    }

    #endregion

    #region OrchestratorEventType Tests

    [Theory]
    [InlineData(OrchestratorEventType.JobCreated, "job.created")]
    [InlineData(OrchestratorEventType.JobCompleted, "job.completed")]
    [InlineData(OrchestratorEventType.JobFailed, "job.failed")]
    [InlineData(OrchestratorEventType.ExportCreated, "export.created")]
    [InlineData(OrchestratorEventType.ExportCompleted, "export.completed")]
    [InlineData(OrchestratorEventType.ScheduleTriggered, "schedule.triggered")]
    [InlineData(OrchestratorEventType.PolicyUpdated, "policy.updated")]
    [InlineData(OrchestratorEventType.PackRunCompleted, "pack_run.completed")]
    public void OrchestratorEventType_ToEventTypeName_ReturnsCanonicalName(
        OrchestratorEventType eventType, string expectedName)
    {
        Assert.Equal(expectedName, eventType.ToEventTypeName());
    }

    [Theory]
    [InlineData("job.created", OrchestratorEventType.JobCreated)]
    [InlineData("job.completed", OrchestratorEventType.JobCompleted)]
    [InlineData("export.failed", OrchestratorEventType.ExportFailed)]
    [InlineData("schedule.enabled", OrchestratorEventType.ScheduleEnabled)]
    [InlineData("pack_run.started", OrchestratorEventType.PackRunStarted)]
    public void OrchestratorEventType_FromEventTypeName_ParsesCanonicalName(
        string name, OrchestratorEventType expected)
    {
        Assert.Equal(expected, OrchestratorEventTypeExtensions.FromEventTypeName(name));
    }

    [Fact]
    public void OrchestratorEventType_FromEventTypeName_ReturnsNullForUnknown()
    {
        Assert.Null(OrchestratorEventTypeExtensions.FromEventTypeName("unknown.event"));
    }

    [Theory]
    [InlineData(OrchestratorEventType.JobFailed, true)]
    [InlineData(OrchestratorEventType.ExportFailed, true)]
    [InlineData(OrchestratorEventType.PackRunFailed, true)]
    [InlineData(OrchestratorEventType.JobCompleted, false)]
    [InlineData(OrchestratorEventType.ExportCreated, false)]
    public void OrchestratorEventType_IsFailure_IdentifiesFailures(
        OrchestratorEventType eventType, bool isFailure)
    {
        Assert.Equal(isFailure, eventType.IsFailure());
    }

    [Theory]
    [InlineData(OrchestratorEventType.JobCompleted, true)]
    [InlineData(OrchestratorEventType.ExportCompleted, true)]
    [InlineData(OrchestratorEventType.PackRunCompleted, true)]
    [InlineData(OrchestratorEventType.RetentionPruneCompleted, true)]
    [InlineData(OrchestratorEventType.JobFailed, false)]
    [InlineData(OrchestratorEventType.JobCreated, false)]
    public void OrchestratorEventType_IsCompletion_IdentifiesCompletions(
        OrchestratorEventType eventType, bool isCompletion)
    {
        Assert.Equal(isCompletion, eventType.IsCompletion());
    }

    [Theory]
    [InlineData(OrchestratorEventType.JobCompleted, true)]
    [InlineData(OrchestratorEventType.JobFailed, true)]
    [InlineData(OrchestratorEventType.JobCanceled, true)]
    [InlineData(OrchestratorEventType.ExportDeleted, true)]
    [InlineData(OrchestratorEventType.AlertResolved, true)]
    [InlineData(OrchestratorEventType.JobCreated, false)]
    [InlineData(OrchestratorEventType.JobStarted, false)]
    [InlineData(OrchestratorEventType.AlertCreated, false)]
    public void OrchestratorEventType_IsTerminal_IdentifiesTerminalEvents(
        OrchestratorEventType eventType, bool isTerminal)
    {
        Assert.Equal(isTerminal, eventType.IsTerminal());
    }

    #endregion

    #region InMemoryIdempotencyStore Tests

    [Fact]
    public async Task InMemoryIdempotencyStore_TryMark_ReturnsTrueForNewKey()
    {
        var store = new InMemoryIdempotencyStore();

        var result = await store.TryMarkAsync("key-1", TimeSpan.FromMinutes(5), CT);

        Assert.True(result);
        Assert.Equal(1, store.Count);
    }

    [Fact]
    public async Task InMemoryIdempotencyStore_TryMark_ReturnsFalseForExistingKey()
    {
        var store = new InMemoryIdempotencyStore();
        await store.TryMarkAsync("key-1", TimeSpan.FromMinutes(5), CT);

        var result = await store.TryMarkAsync("key-1", TimeSpan.FromMinutes(5), CT);

        Assert.False(result);
        Assert.Equal(1, store.Count);
    }

    [Fact]
    public async Task InMemoryIdempotencyStore_Exists_ReturnsTrueForExisting()
    {
        var store = new InMemoryIdempotencyStore();
        await store.TryMarkAsync("key-1", TimeSpan.FromMinutes(5), CT);

        Assert.True(await store.ExistsAsync("key-1", CT));
        Assert.False(await store.ExistsAsync("key-2", CT));
    }

    [Fact]
    public async Task InMemoryIdempotencyStore_Remove_RemovesKey()
    {
        var store = new InMemoryIdempotencyStore();
        await store.TryMarkAsync("key-1", TimeSpan.FromMinutes(5), CT);

        await store.RemoveAsync("key-1", CT);

        Assert.False(await store.ExistsAsync("key-1", CT));
        Assert.Equal(0, store.Count);
    }

    [Fact]
    public async Task InMemoryIdempotencyStore_Clear_RemovesAllKeys()
    {
        var store = new InMemoryIdempotencyStore();
        await store.TryMarkAsync("key-1", TimeSpan.FromMinutes(5), CT);
        await store.TryMarkAsync("key-2", TimeSpan.FromMinutes(5), CT);

        store.Clear();

        Assert.Equal(0, store.Count);
    }

    [Fact]
    public async Task InMemoryIdempotencyStore_ExpiresKeys()
    {
        var store = new InMemoryIdempotencyStore();
        await store.TryMarkAsync("key-1", TimeSpan.FromMilliseconds(1), CT);

        await Task.Delay(10, CT);

        // Key should be cleaned up on next operation
        Assert.False(await store.ExistsAsync("key-1", CT));
    }

    #endregion

    #region NullNotifierBus Tests

    [Fact]
    public async Task NullNotifierBus_Send_RecordsMessage()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();

        await bus.SendAsync("orch.jobs", "test-message", CT);

        var messages = bus.GetMessages();
        Assert.Single(messages);
        Assert.Equal("orch.jobs", messages[0].Channel);
        Assert.Equal("test-message", messages[0].Message);
    }

    [Fact]
    public async Task NullNotifierBus_SendBatch_RecordsAllMessages()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();

        await bus.SendBatchAsync("orch.exports", new[] { "msg1", "msg2", "msg3" }, CT);

        var messages = bus.GetMessages("orch.exports");
        Assert.Equal(3, messages.Count);
    }

    [Fact]
    public async Task NullNotifierBus_GetMessages_FiltersByChannel()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();

        await bus.SendAsync("orch.jobs", "job-msg", CT);
        await bus.SendAsync("orch.exports", "export-msg", CT);

        var jobMessages = bus.GetMessages("orch.jobs");
        var exportMessages = bus.GetMessages("orch.exports");

        Assert.Single(jobMessages);
        Assert.Single(exportMessages);
        Assert.Equal("job-msg", jobMessages[0]);
        Assert.Equal("export-msg", exportMessages[0]);
    }

    #endregion

    #region NullEventSigner Tests

    [Fact]
    public async Task NullEventSigner_Sign_ReturnsDsseFormat()
    {
        var signer = NullEventSigner.Instance;
        var actor = EventActor.Service("test");
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.JobCreated,
            tenantId: "tenant-1",
            actor: actor);

        var signed = await signer.SignAsync(envelope, CT);

        Assert.Contains("payloadType", signed);
        Assert.Contains("application/vnd.orch.event+json", signed);
        Assert.Contains("payload", signed);
        Assert.Contains("signatures", signed);
    }

    [Fact]
    public async Task NullEventSigner_Verify_ExtractsEnvelope()
    {
        var signer = NullEventSigner.Instance;
        var actor = EventActor.Service("test");
        var original = EventEnvelope.Create(
            eventType: OrchestratorEventType.ExportCompleted,
            tenantId: "tenant-1",
            actor: actor,
            projectId: "proj-1");

        var signed = await signer.SignAsync(original, CT);
        var verified = await signer.VerifyAsync(signed, CT);

        Assert.NotNull(verified);
        Assert.Equal(original.EventType, verified!.EventType);
        Assert.Equal(original.TenantId, verified.TenantId);
    }

    [Fact]
    public async Task NullEventSigner_Verify_ReturnsNullForInvalidPayload()
    {
        var signer = NullEventSigner.Instance;

        var result = await signer.VerifyAsync("invalid json", CT);

        Assert.Null(result);
    }

    #endregion

    #region NullEventPublisher Tests

    [Fact]
    public async Task NullEventPublisher_Publish_ReturnsTrue()
    {
        var publisher = NullEventPublisher.Instance;
        var actor = EventActor.Service("test");
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.JobCreated,
            tenantId: "tenant-1",
            actor: actor);

        var result = await publisher.PublishAsync(envelope, CT);

        Assert.True(result);
    }

    [Fact]
    public async Task NullEventPublisher_PublishBatch_ReturnsCorrectCount()
    {
        var publisher = NullEventPublisher.Instance;
        var actor = EventActor.Service("test");
        var envelopes = Enumerable.Range(1, 5).Select(i =>
            EventEnvelope.Create(
                eventType: OrchestratorEventType.JobCreated,
                tenantId: "tenant-1",
                actor: actor));

        var result = await publisher.PublishBatchAsync(envelopes, CT);

        Assert.Equal(5, result.Published);
        Assert.Equal(0, result.Deduplicated);
        Assert.Equal(0, result.Failed);
        Assert.False(result.HasErrors);
    }

    [Fact]
    public async Task NullEventPublisher_IsPublished_ReturnsFalse()
    {
        var publisher = NullEventPublisher.Instance;

        var result = await publisher.IsPublishedAsync("any-key", CT);

        Assert.False(result);
    }

    #endregion

    #region OrchestratorEventPublisher Tests

    [Fact]
    public async Task OrchestratorEventPublisher_Publish_PublishesToBus()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();
        var store = new InMemoryIdempotencyStore();
        var options = Options.Create(EventPublishOptions.Default with { SignWithDsse = false });
        var publisher = new OrchestratorEventPublisher(
            store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance);

        var actor = EventActor.Service("test");
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.JobCompleted,
            tenantId: "tenant-1",
            actor: actor);

        var result = await publisher.PublishAsync(envelope, CT);

        Assert.True(result);
        var messages = bus.GetMessages("orch.jobs");
        Assert.Single(messages);
    }

    [Fact]
    public async Task OrchestratorEventPublisher_Publish_DeduplicatesByIdempotencyKey()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();
        var store = new InMemoryIdempotencyStore();
        var options = Options.Create(EventPublishOptions.Default with { SignWithDsse = false });
        var publisher = new OrchestratorEventPublisher(
            store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance);

        var actor = EventActor.Service("test");
        var job = EventJob.Completed("job-1", "test", 1);
        var envelope = EventEnvelope.ForJob(
            eventType: OrchestratorEventType.JobCompleted,
            tenantId: "tenant-1",
            actor: actor,
            job: job);

        var result1 = await publisher.PublishAsync(envelope, CT);
        var result2 = await publisher.PublishAsync(envelope, CT);

        Assert.True(result1);
        Assert.False(result2);
        Assert.Single(bus.GetMessages("orch.jobs"));
    }

    [Fact]
    public async Task OrchestratorEventPublisher_Publish_SignsWithDsse()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();
        var store = new InMemoryIdempotencyStore();
        var signer = NullEventSigner.Instance;
        var options = Options.Create(EventPublishOptions.Default with { SignWithDsse = true });
        var publisher = new OrchestratorEventPublisher(
            store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance, signer);

        var actor = EventActor.Service("test");
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.PolicyUpdated,
            tenantId: "tenant-1",
            actor: actor);

        await publisher.PublishAsync(envelope, CT);

        var messages = bus.GetMessages("orch.policy");
        Assert.Single(messages);
        Assert.Contains("payloadType", messages[0]);
    }

    [Fact]
    public async Task OrchestratorEventPublisher_Publish_RoutesToCorrectChannel()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();
        var store = new InMemoryIdempotencyStore();
        var options = Options.Create(EventPublishOptions.Default with { SignWithDsse = false });
        var publisher = new OrchestratorEventPublisher(
            store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance);

        var actor = EventActor.Service("test");

        // Export event
        await publisher.PublishAsync(EventEnvelope.Create(
            eventType: OrchestratorEventType.ExportCreated,
            tenantId: "t1",
            actor: actor), CT);

        // Policy event
        await publisher.PublishAsync(EventEnvelope.Create(
            eventType: OrchestratorEventType.PolicyUpdated,
            tenantId: "t1",
            actor: actor), CT);

        // Schedule event
        await publisher.PublishAsync(EventEnvelope.Create(
            eventType: OrchestratorEventType.ScheduleTriggered,
            tenantId: "t1",
            actor: actor), CT);

        // Alert event
        await publisher.PublishAsync(EventEnvelope.Create(
            eventType: OrchestratorEventType.AlertCreated,
            tenantId: "t1",
            actor: actor), CT);

        // Pack run event
        await publisher.PublishAsync(EventEnvelope.Create(
            eventType: OrchestratorEventType.PackRunStarted,
            tenantId: "t1",
            actor: actor), CT);

        Assert.Single(bus.GetMessages("orch.exports"));
        Assert.Single(bus.GetMessages("orch.policy"));
        Assert.Single(bus.GetMessages("orch.schedules"));
        Assert.Single(bus.GetMessages("orch.alerts"));
        Assert.Single(bus.GetMessages("orch.pack_runs"));
    }

    [Fact]
    public async Task OrchestratorEventPublisher_Publish_UsesCustomChannel()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();
        var store = new InMemoryIdempotencyStore();
        var options = Options.Create(EventPublishOptions.Default with { SignWithDsse = false });
        var publisher = new OrchestratorEventPublisher(
            store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance);

        var actor = EventActor.Service("test");
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.JobCompleted,
            tenantId: "tenant-1",
            actor: actor,
            notifier: new EventNotifier("custom.channel", "raw", null));

        await publisher.PublishAsync(envelope, CT);

        Assert.Single(bus.GetMessages("custom.channel"));
        Assert.Empty(bus.GetMessages("orch.jobs"));
    }

    [Fact]
    public async Task OrchestratorEventPublisher_IsPublished_ChecksIdempotencyStore()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();
        var store = new InMemoryIdempotencyStore();
        var options = Options.Create(EventPublishOptions.Default with { SignWithDsse = false });
        var publisher = new OrchestratorEventPublisher(
            store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance);

        var actor = EventActor.Service("test");
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.JobCompleted,
            tenantId: "tenant-1",
            actor: actor);

        Assert.False(await publisher.IsPublishedAsync(envelope.IdempotencyKey, CT));

        await publisher.PublishAsync(envelope, CT);

        Assert.True(await publisher.IsPublishedAsync(envelope.IdempotencyKey, CT));
    }

    [Fact]
    public async Task OrchestratorEventPublisher_PublishBatch_ReturnsCorrectCounts()
    {
        var bus = NullNotifierBus.Instance;
        bus.Clear();
        var store = new InMemoryIdempotencyStore();
        var options = Options.Create(EventPublishOptions.Default with { SignWithDsse = false });
        var publisher = new OrchestratorEventPublisher(
            store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance);

        var actor = EventActor.Service("test");
        var job = EventJob.Completed("job-1", "test", 1);
        var envelope = EventEnvelope.ForJob(
            eventType: OrchestratorEventType.JobCompleted,
            tenantId: "tenant-1",
            actor: actor,
            job: job);

        // First batch - all new
        var result1 = await publisher.PublishBatchAsync(new[] { envelope }, CT);
        Assert.Equal(1, result1.Published);
        Assert.Equal(0, result1.Deduplicated);

        // Second batch - all duplicates
        var result2 = await publisher.PublishBatchAsync(new[] { envelope }, CT);
        Assert.Equal(0, result2.Published);
        Assert.Equal(1, result2.Deduplicated);
    }

    #endregion

    #region BatchPublishResult Tests

    [Fact]
    public void BatchPublishResult_Total_ReturnsSum()
    {
        var result = new BatchPublishResult(10, 5, 2, new List<string>());

        Assert.Equal(17, result.Total);
    }

    [Fact]
    public void BatchPublishResult_HasPublished_TrueWhenPublished()
    {
        var result1 = new BatchPublishResult(1, 0, 0, new List<string>());
        var result2 = new BatchPublishResult(0, 1, 0, new List<string>());

        Assert.True(result1.HasPublished);
        Assert.False(result2.HasPublished);
    }

    [Fact]
    public void BatchPublishResult_HasErrors_TrueWhenFailedOrErrors()
    {
        var result1 = new BatchPublishResult(0, 0, 1, new List<string>());
        var result2 = new BatchPublishResult(0, 0, 0, new List<string> { "error" });
        var result3 = new BatchPublishResult(1, 0, 0, new List<string>());

        Assert.True(result1.HasErrors);
        Assert.True(result2.HasErrors);
        Assert.False(result3.HasErrors);
    }

    [Fact]
    public void BatchPublishResult_Empty_ReturnsZeros()
    {
        var result = BatchPublishResult.Empty;

        Assert.Equal(0, result.Published);
        Assert.Equal(0, result.Deduplicated);
        Assert.Equal(0, result.Failed);
        Assert.Empty(result.Errors);
    }

    [Fact]
    public void BatchPublishResult_SingleSuccess_ReturnsOne()
    {
        var result = BatchPublishResult.SingleSuccess;

        Assert.Equal(1, result.Published);
        Assert.Equal(0, result.Deduplicated);
        Assert.False(result.HasErrors);
    }

    [Fact]
    public void BatchPublishResult_SingleDeduplicated_ReturnsOneDeduplicated()
    {
        var result = BatchPublishResult.SingleDeduplicated;

        Assert.Equal(0, result.Published);
        Assert.Equal(1, result.Deduplicated);
        Assert.False(result.HasErrors);
    }

    #endregion

    #region EventPublishOptions Tests

    [Fact]
    public void EventPublishOptions_Default_HasExpectedValues()
    {
        var options = EventPublishOptions.Default;

        Assert.True(options.SignWithDsse);
        Assert.Equal(3, options.MaxRetries);
        Assert.Equal(TimeSpan.FromSeconds(1), options.RetryDelay);
        Assert.Equal(TimeSpan.FromHours(24), options.IdempotencyTtl);
        Assert.True(options.IncludeProvenance);
        Assert.True(options.CompressLargePayloads);
        Assert.Equal(64 * 1024, options.CompressionThreshold);
    }

    #endregion
}
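Outside the test harness, the same pieces compose into a straightforward publish path. A non-test sketch using only the types exercised above (the production DI wiring is not shown in this diff, so the in-memory store, null bus, and null signer here are stand-ins):

    // Illustrative sketch only, not part of this commit; swap the null/in-memory components for real ones.
    var store = new InMemoryIdempotencyStore();
    var bus = NullNotifierBus.Instance;
    var options = Options.Create(EventPublishOptions.Default);
    var publisher = new OrchestratorEventPublisher(
        store, bus, options, NullLogger<OrchestratorEventPublisher>.Instance, NullEventSigner.Instance);

    var envelope = EventEnvelope.ForJob(
        eventType: OrchestratorEventType.JobCompleted,
        tenantId: "tenant-1",
        actor: EventActor.Service("orchestrator"),
        job: EventJob.Completed("job-123", "pack-run", 1));

    // The idempotency key derives from (event type, job id, attempt), so retrying the same
    // completion is deduplicated instead of being delivered to orch.jobs a second time.
    var delivered = await publisher.PublishAsync(envelope, CancellationToken.None); // true
    var replayed = await publisher.PublishAsync(envelope, CancellationToken.None);  // false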
@@ -0,0 +1,159 @@
using StellaOps.Orchestrator.Core.Domain.Export;

namespace StellaOps.Orchestrator.Tests.Export;

/// <summary>
/// Tests for ExportDistribution metadata.
/// </summary>
public sealed class ExportDistributionTests
{
    [Fact]
    public void ToJson_SerializesCorrectly()
    {
        var distribution = new ExportDistribution(
            PrimaryUri: "s3://bucket/exports/export-001.json.gz",
            DownloadUrl: "https://cdn.example.com/exports/export-001.json.gz?token=abc",
            DownloadUrlExpiresAt: new DateTimeOffset(2024, 12, 31, 23, 59, 59, TimeSpan.Zero),
            StorageProvider: "s3",
            Region: "us-east-1",
            StorageTier: "hot",
            Replicas: null,
            ReplicationStatus: null,
            ContentType: "application/gzip",
            AccessList: null,
            IsPublic: false,
            CreatedAt: new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero));

        var json = distribution.ToJson();

        Assert.Contains("\"primaryUri\":\"s3://bucket/exports/export-001.json.gz\"", json);
        Assert.Contains("\"storageProvider\":\"s3\"", json);
        Assert.Contains("\"storageTier\":\"hot\"", json);
    }

    [Fact]
    public void FromJson_DeserializesCorrectly()
    {
        var original = new ExportDistribution(
            PrimaryUri: "s3://bucket/test.json",
            DownloadUrl: null,
            DownloadUrlExpiresAt: null,
            StorageProvider: "s3",
            Region: "eu-west-1",
            StorageTier: "cool",
            Replicas: null,
            ReplicationStatus: null,
            ContentType: "application/json",
            AccessList: null,
            IsPublic: true,
            CreatedAt: DateTimeOffset.UtcNow);

        var json = original.ToJson();
        var deserialized = ExportDistribution.FromJson(json);

        Assert.NotNull(deserialized);
        Assert.Equal(original.PrimaryUri, deserialized.PrimaryUri);
        Assert.Equal(original.StorageProvider, deserialized.StorageProvider);
        Assert.Equal(original.Region, deserialized.Region);
        Assert.Equal(original.IsPublic, deserialized.IsPublic);
    }

    [Fact]
    public void FromJson_ReturnsNullForInvalidJson()
    {
        var result = ExportDistribution.FromJson("not valid json");
        Assert.Null(result);
    }

    [Fact]
    public void WithDownloadUrl_SetsUrlAndExpiration()
    {
        var distribution = new ExportDistribution(
            PrimaryUri: "s3://bucket/test.json",
            DownloadUrl: null,
            DownloadUrlExpiresAt: null,
            StorageProvider: "s3",
            Region: null,
            StorageTier: "hot",
            Replicas: null,
            ReplicationStatus: null,
            ContentType: "application/json",
            AccessList: null,
            IsPublic: false,
            CreatedAt: DateTimeOffset.UtcNow);

        var beforeUpdate = DateTimeOffset.UtcNow;
        var updated = distribution.WithDownloadUrl("https://download.example.com/test.json", TimeSpan.FromHours(1));
        var afterUpdate = DateTimeOffset.UtcNow.AddHours(1);

        Assert.Equal("https://download.example.com/test.json", updated.DownloadUrl);
        Assert.NotNull(updated.DownloadUrlExpiresAt);
        Assert.True(updated.DownloadUrlExpiresAt >= beforeUpdate.AddHours(1).AddSeconds(-1));
        Assert.True(updated.DownloadUrlExpiresAt <= afterUpdate);
    }

    [Fact]
    public void WithReplica_AddsReplicaToEmptyDistribution()
    {
        var distribution = new ExportDistribution(
            PrimaryUri: "s3://primary/test.json",
            DownloadUrl: null,
            DownloadUrlExpiresAt: null,
            StorageProvider: "s3",
            Region: null,
            StorageTier: "hot",
            Replicas: null,
            ReplicationStatus: null,
            ContentType: "application/json",
            AccessList: null,
            IsPublic: false,
            CreatedAt: DateTimeOffset.UtcNow);

        var updated = distribution.WithReplica("backup", "s3://backup/test.json", ReplicationStatus.Completed);

        Assert.NotNull(updated.Replicas);
        Assert.Single(updated.Replicas);
        Assert.Equal("s3://backup/test.json", updated.Replicas["backup"]);
        Assert.NotNull(updated.ReplicationStatus);
        Assert.Equal(ReplicationStatus.Completed, updated.ReplicationStatus["backup"]);
    }

    [Fact]
    public void WithReplica_AddsMultipleReplicas()
    {
        var distribution = new ExportDistribution(
            PrimaryUri: "s3://primary/test.json",
            DownloadUrl: null,
            DownloadUrlExpiresAt: null,
            StorageProvider: "s3",
            Region: null,
            StorageTier: "hot",
            Replicas: null,
            ReplicationStatus: null,
            ContentType: "application/json",
            AccessList: null,
            IsPublic: false,
            CreatedAt: DateTimeOffset.UtcNow);

        var updated = distribution
            .WithReplica("backup1", "s3://backup1/test.json", ReplicationStatus.Completed)
            .WithReplica("backup2", "s3://backup2/test.json", ReplicationStatus.InProgress);

        Assert.NotNull(updated.Replicas);
        Assert.Equal(2, updated.Replicas.Count);
        Assert.Equal("s3://backup1/test.json", updated.Replicas["backup1"]);
        Assert.Equal("s3://backup2/test.json", updated.Replicas["backup2"]);
        Assert.Equal(ReplicationStatus.InProgress, updated.ReplicationStatus!["backup2"]);
    }

    [Theory]
    [InlineData(ReplicationStatus.Pending)]
    [InlineData(ReplicationStatus.InProgress)]
    [InlineData(ReplicationStatus.Completed)]
    [InlineData(ReplicationStatus.Failed)]
    [InlineData(ReplicationStatus.Skipped)]
    public void ReplicationStatus_AllValuesAreValid(ReplicationStatus status)
    {
        Assert.True(Enum.IsDefined(status));
    }
}
@@ -0,0 +1,133 @@
using StellaOps.Orchestrator.Core.Domain.Export;

namespace StellaOps.Orchestrator.Tests.Export;

/// <summary>
/// Tests for ExportJobPayload serialization and validation.
/// </summary>
public sealed class ExportJobPayloadTests
{
    [Fact]
    public void Default_CreatesPayloadWithFormat()
    {
        var payload = ExportJobPayload.Default("json");

        Assert.Equal("json", payload.Format);
        Assert.Null(payload.StartTime);
        Assert.Null(payload.EndTime);
        Assert.Null(payload.SourceId);
        Assert.Null(payload.ProjectId);
        Assert.Null(payload.EntityIds);
        Assert.Null(payload.MaxEntries);
        Assert.True(payload.IncludeProvenance);
        Assert.True(payload.SignOutput);
        Assert.Null(payload.Compression);
        Assert.Null(payload.DestinationUri);
        Assert.Null(payload.CallbackUrl);
        Assert.Null(payload.Options);
    }

    [Theory]
    [InlineData("json")]
    [InlineData("ndjson")]
    [InlineData("csv")]
    [InlineData("spdx")]
    [InlineData("cyclonedx")]
    public void Default_SupportsDifferentFormats(string format)
    {
        var payload = ExportJobPayload.Default(format);
        Assert.Equal(format, payload.Format);
    }

    [Fact]
    public void ToJson_SerializesCorrectly()
    {
        var payload = new ExportJobPayload(
            Format: "json",
            StartTime: new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero),
            EndTime: new DateTimeOffset(2024, 1, 31, 23, 59, 59, TimeSpan.Zero),
            SourceId: Guid.Parse("12345678-1234-1234-1234-123456789abc"),
            ProjectId: "project-1",
            EntityIds: [Guid.Parse("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa")],
            MaxEntries: 1000,
            IncludeProvenance: true,
            SignOutput: false,
            Compression: "gzip",
            DestinationUri: "s3://bucket/exports/file.json.gz",
            CallbackUrl: "https://webhook.example.com/export-complete",
            Options: new Dictionary<string, string> { ["key"] = "value" });

        var json = payload.ToJson();

        Assert.Contains("\"format\":\"json\"", json);
        Assert.Contains("\"maxEntries\":1000", json);
        Assert.Contains("\"compression\":\"gzip\"", json);
        Assert.Contains("\"signOutput\":false", json);
    }

    [Fact]
    public void FromJson_DeserializesCorrectly()
    {
        var original = new ExportJobPayload(
            Format: "ndjson",
            StartTime: new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero),
            EndTime: null,
            SourceId: Guid.Parse("12345678-1234-1234-1234-123456789abc"),
            ProjectId: null,
            EntityIds: null,
            MaxEntries: 500,
            IncludeProvenance: false,
            SignOutput: true,
            Compression: null,
            DestinationUri: null,
            CallbackUrl: null,
            Options: null);

        var json = original.ToJson();
        var deserialized = ExportJobPayload.FromJson(json);

        Assert.NotNull(deserialized);
        Assert.Equal(original.Format, deserialized.Format);
        Assert.Equal(original.StartTime, deserialized.StartTime);
        Assert.Equal(original.SourceId, deserialized.SourceId);
        Assert.Equal(original.MaxEntries, deserialized.MaxEntries);
        Assert.Equal(original.IncludeProvenance, deserialized.IncludeProvenance);
        Assert.Equal(original.SignOutput, deserialized.SignOutput);
    }

    [Fact]
    public void ComputeDigest_ReturnsSha256Prefixed()
    {
        var payload = ExportJobPayload.Default("json");
        var digest = payload.ComputeDigest();

        Assert.StartsWith("sha256:", digest);
        Assert.Equal(71, digest.Length); // "sha256:" (7) + 64 hex chars
    }

    [Fact]
    public void ComputeDigest_IsDeterministic()
    {
        var payload = ExportJobPayload.Default("json");
        var digest1 = payload.ComputeDigest();
        var digest2 = payload.ComputeDigest();

        Assert.Equal(digest1, digest2);
    }

    [Fact]
    public void ComputeDigest_DifferentPayloadsHaveDifferentDigests()
    {
        var payload1 = ExportJobPayload.Default("json");
        var payload2 = ExportJobPayload.Default("ndjson");

        Assert.NotEqual(payload1.ComputeDigest(), payload2.ComputeDigest());
    }

    [Fact]
    public void FromJson_ReturnsNullForInvalidJson()
    {
        var result = ExportJobPayload.FromJson("invalid json");
        Assert.Null(result);
    }
}
@@ -0,0 +1,151 @@
using StellaOps.Orchestrator.Core.Domain.Export;

namespace StellaOps.Orchestrator.Tests.Export;

/// <summary>
/// Tests for ExportJobPolicy defaults and rate limits.
/// </summary>
public sealed class ExportJobPolicyTests
{
    [Fact]
    public void QuotaDefaults_HaveReasonableValues()
    {
        Assert.Equal(5, ExportJobPolicy.QuotaDefaults.MaxActive);
        Assert.Equal(50, ExportJobPolicy.QuotaDefaults.MaxPerHour);
        Assert.Equal(10, ExportJobPolicy.QuotaDefaults.BurstCapacity);
        Assert.Equal(0.5, ExportJobPolicy.QuotaDefaults.RefillRate);
        Assert.Equal(-10, ExportJobPolicy.QuotaDefaults.DefaultPriority);
        Assert.Equal(3, ExportJobPolicy.QuotaDefaults.MaxAttempts);
        Assert.Equal(600, ExportJobPolicy.QuotaDefaults.DefaultLeaseSeconds);
        Assert.Equal(3600, ExportJobPolicy.QuotaDefaults.MaxLeaseSeconds);
        Assert.Equal(60, ExportJobPolicy.QuotaDefaults.RecommendedHeartbeatInterval);
    }

    [Theory]
    [InlineData(ExportJobTypes.Ledger, 3, 30, 120)]
    [InlineData(ExportJobTypes.Sbom, 5, 100, 30)]
    [InlineData(ExportJobTypes.Vex, 5, 100, 30)]
    [InlineData(ExportJobTypes.ScanResults, 3, 50, 60)]
    [InlineData(ExportJobTypes.PolicyEvaluation, 3, 50, 60)]
    [InlineData(ExportJobTypes.Attestation, 2, 20, 180)]
    [InlineData(ExportJobTypes.PortableBundle, 1, 10, 600)]
    public void RateLimits_GetForJobType_ReturnsExpectedValues(
        string jobType,
        int expectedMaxConcurrent,
        int expectedMaxPerHour,
        int expectedDuration)
    {
        var rateLimit = ExportJobPolicy.RateLimits.GetForJobType(jobType);

        Assert.Equal(expectedMaxConcurrent, rateLimit.MaxConcurrent);
        Assert.Equal(expectedMaxPerHour, rateLimit.MaxPerHour);
        Assert.Equal(expectedDuration, rateLimit.EstimatedDurationSeconds);
    }

    [Fact]
    public void RateLimits_GetForJobType_ReturnsDefaultForUnknownType()
    {
        var rateLimit = ExportJobPolicy.RateLimits.GetForJobType("export.unknown");

        Assert.Equal(3, rateLimit.MaxConcurrent);
        Assert.Equal(30, rateLimit.MaxPerHour);
        Assert.Equal(120, rateLimit.EstimatedDurationSeconds);
    }

    [Fact]
    public void Timeouts_HaveReasonableValues()
    {
        Assert.Equal(TimeSpan.FromHours(2), ExportJobPolicy.Timeouts.MaxJobDuration);
        Assert.Equal(TimeSpan.FromMinutes(5), ExportJobPolicy.Timeouts.HeartbeatTimeout);
        Assert.Equal(TimeSpan.FromMinutes(1), ExportJobPolicy.Timeouts.RetryBackoff);
        Assert.Equal(TimeSpan.FromMinutes(30), ExportJobPolicy.Timeouts.MaxRetryBackoff);
    }

    [Fact]
    public void CreateDefaultQuota_CreatesValidQuota()
    {
        var quota = ExportJobPolicy.CreateDefaultQuota("tenant-1", ExportJobTypes.Ledger, "test-user");

        Assert.NotEqual(Guid.Empty, quota.QuotaId);
        Assert.Equal("tenant-1", quota.TenantId);
        Assert.Equal(ExportJobTypes.Ledger, quota.JobType);
        Assert.Equal(3, quota.MaxActive); // Ledger specific
        Assert.Equal(30, quota.MaxPerHour); // Ledger specific
        Assert.Equal(ExportJobPolicy.QuotaDefaults.BurstCapacity, quota.BurstCapacity);
        Assert.Equal(ExportJobPolicy.QuotaDefaults.RefillRate, quota.RefillRate);
        Assert.Equal(quota.BurstCapacity, quota.CurrentTokens);
        Assert.Equal(0, quota.CurrentActive);
        Assert.Equal(0, quota.CurrentHourCount);
        Assert.False(quota.Paused);
        Assert.Null(quota.PauseReason);
        Assert.Null(quota.QuotaTicket);
        Assert.Equal("test-user", quota.UpdatedBy);
    }

    [Fact]
    public void CreateDefaultQuota_WithoutJobType_UsesGlobalDefaults()
    {
        var quota = ExportJobPolicy.CreateDefaultQuota("tenant-1", jobType: null, "test-user");

        Assert.Equal("tenant-1", quota.TenantId);
        Assert.Null(quota.JobType);
        Assert.Equal(ExportJobPolicy.QuotaDefaults.MaxActive, quota.MaxActive);
        Assert.Equal(ExportJobPolicy.QuotaDefaults.MaxPerHour, quota.MaxPerHour);
    }

    [Fact]
    public void CreateDefaultQuota_SetsCurrentTimeFields()
    {
        var before = DateTimeOffset.UtcNow;
        var quota = ExportJobPolicy.CreateDefaultQuota("tenant-1", ExportJobTypes.Sbom, "test-user");
        var after = DateTimeOffset.UtcNow;

        Assert.InRange(quota.CreatedAt, before, after);
        Assert.InRange(quota.UpdatedAt, before, after);
        Assert.InRange(quota.LastRefillAt, before, after);
        Assert.InRange(quota.CurrentHourStart, before, after);
    }

    [Theory]
    [InlineData(ExportJobTypes.Ledger)]
    [InlineData(ExportJobTypes.Sbom)]
    [InlineData(ExportJobTypes.Attestation)]
    [InlineData(ExportJobTypes.PortableBundle)]
    public void CreateDefaultQuota_UsesTypeSpecificLimits(string jobType)
    {
        var expectedLimit = ExportJobPolicy.RateLimits.GetForJobType(jobType);
        var quota = ExportJobPolicy.CreateDefaultQuota("tenant-1", jobType, "test-user");

        Assert.Equal(expectedLimit.MaxConcurrent, quota.MaxActive);
        Assert.Equal(expectedLimit.MaxPerHour, quota.MaxPerHour);
    }

    [Fact]
    public void RateLimits_PortableBundle_HasLowestLimits()
    {
        var portableBundle = ExportJobPolicy.RateLimits.PortableBundle;
        var ledger = ExportJobPolicy.RateLimits.Ledger;
        var sbom = ExportJobPolicy.RateLimits.Sbom;

        // Portable bundle should have the most restrictive limits
        Assert.True(portableBundle.MaxConcurrent <= ledger.MaxConcurrent);
        Assert.True(portableBundle.MaxConcurrent <= sbom.MaxConcurrent);
        Assert.True(portableBundle.MaxPerHour <= ledger.MaxPerHour);
        Assert.True(portableBundle.MaxPerHour <= sbom.MaxPerHour);
        Assert.True(portableBundle.EstimatedDurationSeconds >= ledger.EstimatedDurationSeconds);
        Assert.True(portableBundle.EstimatedDurationSeconds >= sbom.EstimatedDurationSeconds);
    }

    [Fact]
    public void RateLimits_AllDefinedTypesHaveLimits()
    {
        foreach (var jobType in ExportJobTypes.All)
        {
            var rateLimit = ExportJobPolicy.RateLimits.GetForJobType(jobType);

            Assert.True(rateLimit.MaxConcurrent > 0, $"MaxConcurrent for {jobType} should be positive");
            Assert.True(rateLimit.MaxPerHour > 0, $"MaxPerHour for {jobType} should be positive");
            Assert.True(rateLimit.EstimatedDurationSeconds > 0, $"EstimatedDurationSeconds for {jobType} should be positive");
        }
    }
}
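The BurstCapacity/RefillRate pair asserted above describes a token bucket. The refill arithmetic a quota checker would run is sketched below; treating RefillRate as tokens per second is an assumption, since the unit is not stated anywhere in this diff.

    // Illustrative sketch only, not part of this commit; assumes RefillRate is tokens-per-second.
    static double RefillTokens(
        double currentTokens,
        double burstCapacity,
        double refillRate,
        DateTimeOffset lastRefillAt,
        DateTimeOffset now)
    {
        var elapsedSeconds = (now - lastRefillAt).TotalSeconds;
        // With the defaults (BurstCapacity = 10, RefillRate = 0.5) a drained bucket regains
        // one token every 2 seconds and is full again after 20 seconds.
        return Math.Min(burstCapacity, currentTokens + elapsedSeconds * refillRate);
    }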
@@ -0,0 +1,97 @@
using StellaOps.Orchestrator.Core.Domain.Export;

namespace StellaOps.Orchestrator.Tests.Export;

/// <summary>
/// Tests for ExportJobTypes constants and helpers.
/// </summary>
public sealed class ExportJobTypesTests
{
    [Fact]
    public void Prefix_HasExpectedValue()
    {
        Assert.Equal("export.", ExportJobTypes.Prefix);
    }

    [Fact]
    public void All_ContainsAllDefinedTypes()
    {
        Assert.Contains(ExportJobTypes.Ledger, ExportJobTypes.All);
        Assert.Contains(ExportJobTypes.Sbom, ExportJobTypes.All);
        Assert.Contains(ExportJobTypes.Vex, ExportJobTypes.All);
        Assert.Contains(ExportJobTypes.ScanResults, ExportJobTypes.All);
        Assert.Contains(ExportJobTypes.PolicyEvaluation, ExportJobTypes.All);
        Assert.Contains(ExportJobTypes.Attestation, ExportJobTypes.All);
        Assert.Contains(ExportJobTypes.PortableBundle, ExportJobTypes.All);
    }

    [Fact]
    public void All_TypesStartWithPrefix()
    {
        foreach (var jobType in ExportJobTypes.All)
        {
            Assert.StartsWith(ExportJobTypes.Prefix, jobType);
        }
    }

    [Theory]
    [InlineData("export.ledger", true)]
    [InlineData("export.sbom", true)]
    [InlineData("export.vex", true)]
    [InlineData("export.scan-results", true)]
    [InlineData("export.policy-evaluation", true)]
    [InlineData("export.attestation", true)]
    [InlineData("export.portable-bundle", true)]
    [InlineData("export.custom", true)]
    [InlineData("EXPORT.LEDGER", true)]
    [InlineData("scan.image", false)]
    [InlineData("advisory.nvd", false)]
    [InlineData("", false)]
    [InlineData(null, false)]
    public void IsExportJob_ReturnsCorrectResult(string? jobType, bool expected)
    {
        Assert.Equal(expected, ExportJobTypes.IsExportJob(jobType));
    }

    [Theory]
    [InlineData("export.ledger", "ledger")]
    [InlineData("export.sbom", "sbom")]
    [InlineData("export.vex", "vex")]
    [InlineData("export.scan-results", "scan-results")]
    [InlineData("export.policy-evaluation", "policy-evaluation")]
    [InlineData("export.attestation", "attestation")]
    [InlineData("export.portable-bundle", "portable-bundle")]
    [InlineData("export.custom-format", "custom-format")]
    public void GetExportTarget_ReturnsTargetForExportJob(string jobType, string expectedTarget)
    {
        Assert.Equal(expectedTarget, ExportJobTypes.GetExportTarget(jobType));
    }

    [Theory]
    [InlineData("scan.image")]
    [InlineData("advisory.nvd")]
    [InlineData("")]
    [InlineData(null)]
    public void GetExportTarget_ReturnsNullForNonExportJob(string? jobType)
    {
        Assert.Null(ExportJobTypes.GetExportTarget(jobType));
    }

    [Fact]
    public void GetExportTarget_ReturnsNullForPrefixOnly()
    {
        Assert.Null(ExportJobTypes.GetExportTarget("export."));
    }

    [Fact]
    public void JobTypes_HaveExpectedValues()
    {
        Assert.Equal("export.ledger", ExportJobTypes.Ledger);
        Assert.Equal("export.sbom", ExportJobTypes.Sbom);
        Assert.Equal("export.vex", ExportJobTypes.Vex);
        Assert.Equal("export.scan-results", ExportJobTypes.ScanResults);
        Assert.Equal("export.policy-evaluation", ExportJobTypes.PolicyEvaluation);
        Assert.Equal("export.attestation", ExportJobTypes.Attestation);
|
||||
Assert.Equal("export.portable-bundle", ExportJobTypes.PortableBundle);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,338 @@
|
||||
using StellaOps.Orchestrator.Core.Domain.Export;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Export;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for ExportRetention policy.
|
||||
/// </summary>
|
||||
public sealed class ExportRetentionTests
|
||||
{
|
||||
[Fact]
|
||||
public void Default_CreatesDefaultPolicy()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var retention = ExportRetention.Default(now);
|
||||
|
||||
Assert.Equal(ExportRetention.PolicyNames.Default, retention.PolicyName);
|
||||
Assert.Equal(now, retention.AvailableAt);
|
||||
Assert.NotNull(retention.ArchiveAt);
|
||||
Assert.NotNull(retention.ExpiresAt);
|
||||
Assert.Null(retention.ArchivedAt);
|
||||
Assert.Null(retention.DeletedAt);
|
||||
Assert.False(retention.LegalHold);
|
||||
Assert.False(retention.RequiresRelease);
|
||||
Assert.Equal(0, retention.ExtensionCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Default_SetsCorrectPeriods()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var retention = ExportRetention.Default(now);
|
||||
|
||||
var archiveAt = retention.ArchiveAt!.Value;
|
||||
var expiresAt = retention.ExpiresAt!.Value;
|
||||
|
||||
Assert.Equal(now.Add(ExportRetention.DefaultPeriods.ArchiveDelay), archiveAt);
|
||||
Assert.Equal(now.Add(ExportRetention.DefaultPeriods.Default), expiresAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Temporary_CreatesShorterRetention()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var retention = ExportRetention.Temporary(now);
|
||||
|
||||
Assert.Equal(ExportRetention.PolicyNames.Temporary, retention.PolicyName);
|
||||
Assert.Null(retention.ArchiveAt); // No archive for temporary
|
||||
Assert.Equal(now.Add(ExportRetention.DefaultPeriods.Temporary), retention.ExpiresAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Compliance_RequiresRelease()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var retention = ExportRetention.Compliance(now, TimeSpan.FromDays(365));
|
||||
|
||||
Assert.Equal(ExportRetention.PolicyNames.Compliance, retention.PolicyName);
|
||||
Assert.True(retention.RequiresRelease);
|
||||
Assert.Equal(now.Add(TimeSpan.FromDays(365)), retention.ExpiresAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsExpired_ReturnsTrueWhenExpired()
|
||||
{
|
||||
var past = DateTimeOffset.UtcNow.AddDays(-1);
|
||||
var retention = new ExportRetention(
|
||||
PolicyName: "test",
|
||||
AvailableAt: past.AddDays(-2),
|
||||
ArchiveAt: null,
|
||||
ExpiresAt: past,
|
||||
ArchivedAt: null,
|
||||
DeletedAt: null,
|
||||
LegalHold: false,
|
||||
LegalHoldReason: null,
|
||||
RequiresRelease: false,
|
||||
ReleasedBy: null,
|
||||
ReleasedAt: null,
|
||||
ExtensionCount: 0,
|
||||
Metadata: null);
|
||||
|
||||
Assert.True(retention.IsExpired);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsExpired_ReturnsFalseWhenNotExpired()
|
||||
{
|
||||
var future = DateTimeOffset.UtcNow.AddDays(1);
|
||||
var retention = new ExportRetention(
|
||||
PolicyName: "test",
|
||||
AvailableAt: DateTimeOffset.UtcNow,
|
||||
ArchiveAt: null,
|
||||
ExpiresAt: future,
|
||||
ArchivedAt: null,
|
||||
DeletedAt: null,
|
||||
LegalHold: false,
|
||||
LegalHoldReason: null,
|
||||
RequiresRelease: false,
|
||||
ReleasedBy: null,
|
||||
ReleasedAt: null,
|
||||
ExtensionCount: 0,
|
||||
Metadata: null);
|
||||
|
||||
Assert.False(retention.IsExpired);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsExpired_ReturnsFalseWhenLegalHold()
|
||||
{
|
||||
var past = DateTimeOffset.UtcNow.AddDays(-1);
|
||||
var retention = new ExportRetention(
|
||||
PolicyName: "test",
|
||||
AvailableAt: past.AddDays(-2),
|
||||
ArchiveAt: null,
|
||||
ExpiresAt: past,
|
||||
ArchivedAt: null,
|
||||
DeletedAt: null,
|
||||
LegalHold: true,
|
||||
LegalHoldReason: "Investigation",
|
||||
RequiresRelease: false,
|
||||
ReleasedBy: null,
|
||||
ReleasedAt: null,
|
||||
ExtensionCount: 0,
|
||||
Metadata: null);
|
||||
|
||||
Assert.False(retention.IsExpired); // Legal hold prevents expiration
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ShouldArchive_ReturnsTrueWhenArchiveTimePassed()
|
||||
{
|
||||
var past = DateTimeOffset.UtcNow.AddDays(-1);
|
||||
var retention = new ExportRetention(
|
||||
PolicyName: "test",
|
||||
AvailableAt: past.AddDays(-2),
|
||||
ArchiveAt: past,
|
||||
ExpiresAt: DateTimeOffset.UtcNow.AddDays(30),
|
||||
ArchivedAt: null,
|
||||
DeletedAt: null,
|
||||
LegalHold: false,
|
||||
LegalHoldReason: null,
|
||||
RequiresRelease: false,
|
||||
ReleasedBy: null,
|
||||
ReleasedAt: null,
|
||||
ExtensionCount: 0,
|
||||
Metadata: null);
|
||||
|
||||
Assert.True(retention.ShouldArchive);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ShouldArchive_ReturnsFalseWhenAlreadyArchived()
|
||||
{
|
||||
var past = DateTimeOffset.UtcNow.AddDays(-1);
|
||||
var retention = new ExportRetention(
|
||||
PolicyName: "test",
|
||||
AvailableAt: past.AddDays(-2),
|
||||
ArchiveAt: past,
|
||||
ExpiresAt: DateTimeOffset.UtcNow.AddDays(30),
|
||||
ArchivedAt: past.AddHours(-1), // Already archived
|
||||
DeletedAt: null,
|
||||
LegalHold: false,
|
||||
LegalHoldReason: null,
|
||||
RequiresRelease: false,
|
||||
ReleasedBy: null,
|
||||
ReleasedAt: null,
|
||||
ExtensionCount: 0,
|
||||
Metadata: null);
|
||||
|
||||
Assert.False(retention.ShouldArchive);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanDelete_RequiresExpirationAndRelease()
|
||||
{
|
||||
var past = DateTimeOffset.UtcNow.AddDays(-1);
|
||||
|
||||
// Expired but requires release
|
||||
var retention = new ExportRetention(
|
||||
PolicyName: "test",
|
||||
AvailableAt: past.AddDays(-2),
|
||||
ArchiveAt: null,
|
||||
ExpiresAt: past,
|
||||
ArchivedAt: null,
|
||||
DeletedAt: null,
|
||||
LegalHold: false,
|
||||
LegalHoldReason: null,
|
||||
RequiresRelease: true,
|
||||
ReleasedBy: null,
|
||||
ReleasedAt: null,
|
||||
ExtensionCount: 0,
|
||||
Metadata: null);
|
||||
|
||||
Assert.False(retention.CanDelete); // Not released
|
||||
|
||||
// Now release
|
||||
var released = retention.Release("admin@example.com");
|
||||
Assert.True(released.CanDelete);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExtendRetention_ExtendsExpiration()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var retention = ExportRetention.Default(now);
|
||||
|
||||
var extended = retention.ExtendRetention(TimeSpan.FromDays(30), "Customer request");
|
||||
|
||||
Assert.Equal(1, extended.ExtensionCount);
|
||||
Assert.Equal(retention.ExpiresAt!.Value.AddDays(30), extended.ExpiresAt);
|
||||
Assert.NotNull(extended.Metadata);
|
||||
Assert.Contains("extension_1_reason", extended.Metadata.Keys);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExtendRetention_CanExtendMultipleTimes()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var retention = ExportRetention.Default(now);
|
||||
|
||||
var extended = retention
|
||||
.ExtendRetention(TimeSpan.FromDays(10), "First extension")
|
||||
.ExtendRetention(TimeSpan.FromDays(20), "Second extension");
|
||||
|
||||
Assert.Equal(2, extended.ExtensionCount);
|
||||
Assert.Equal(retention.ExpiresAt!.Value.AddDays(30), extended.ExpiresAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PlaceLegalHold_SetsHoldAndReason()
|
||||
{
|
||||
var retention = ExportRetention.Default(DateTimeOffset.UtcNow);
|
||||
|
||||
var held = retention.PlaceLegalHold("Legal investigation pending");
|
||||
|
||||
Assert.True(held.LegalHold);
|
||||
Assert.Equal("Legal investigation pending", held.LegalHoldReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReleaseLegalHold_ClearsHold()
|
||||
{
|
||||
var retention = ExportRetention.Default(DateTimeOffset.UtcNow)
|
||||
.PlaceLegalHold("Investigation");
|
||||
|
||||
var released = retention.ReleaseLegalHold();
|
||||
|
||||
Assert.False(released.LegalHold);
|
||||
Assert.Null(released.LegalHoldReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Release_SetsReleasedByAndAt()
|
||||
{
|
||||
var retention = ExportRetention.Compliance(DateTimeOffset.UtcNow, TimeSpan.FromDays(365));
|
||||
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
var released = retention.Release("admin@example.com");
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.Equal("admin@example.com", released.ReleasedBy);
|
||||
Assert.NotNull(released.ReleasedAt);
|
||||
Assert.InRange(released.ReleasedAt.Value, before, after);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MarkArchived_SetsArchivedAt()
|
||||
{
|
||||
var retention = ExportRetention.Default(DateTimeOffset.UtcNow);
|
||||
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
var archived = retention.MarkArchived();
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotNull(archived.ArchivedAt);
|
||||
Assert.InRange(archived.ArchivedAt.Value, before, after);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MarkDeleted_SetsDeletedAt()
|
||||
{
|
||||
var retention = ExportRetention.Temporary(DateTimeOffset.UtcNow);
|
||||
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
var deleted = retention.MarkDeleted();
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotNull(deleted.DeletedAt);
|
||||
Assert.InRange(deleted.DeletedAt.Value, before, after);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToJson_SerializesCorrectly()
|
||||
{
|
||||
var retention = ExportRetention.Default(DateTimeOffset.UtcNow);
|
||||
var json = retention.ToJson();
|
||||
|
||||
Assert.Contains("\"policyName\":\"default\"", json);
|
||||
Assert.Contains("\"legalHold\":false", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FromJson_DeserializesCorrectly()
|
||||
{
|
||||
var original = ExportRetention.Default(DateTimeOffset.UtcNow);
|
||||
var json = original.ToJson();
|
||||
var deserialized = ExportRetention.FromJson(json);
|
||||
|
||||
Assert.NotNull(deserialized);
|
||||
Assert.Equal(original.PolicyName, deserialized.PolicyName);
|
||||
Assert.Equal(original.LegalHold, deserialized.LegalHold);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FromJson_ReturnsNullForInvalidJson()
|
||||
{
|
||||
var result = ExportRetention.FromJson("not valid json");
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyNames_ContainsExpectedValues()
|
||||
{
|
||||
Assert.Equal("default", ExportRetention.PolicyNames.Default);
|
||||
Assert.Equal("compliance", ExportRetention.PolicyNames.Compliance);
|
||||
Assert.Equal("temporary", ExportRetention.PolicyNames.Temporary);
|
||||
Assert.Equal("long-term", ExportRetention.PolicyNames.LongTerm);
|
||||
Assert.Equal("permanent", ExportRetention.PolicyNames.Permanent);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DefaultPeriods_HaveReasonableValues()
|
||||
{
|
||||
Assert.Equal(TimeSpan.FromDays(7), ExportRetention.DefaultPeriods.Temporary);
|
||||
Assert.Equal(TimeSpan.FromDays(30), ExportRetention.DefaultPeriods.Default);
|
||||
Assert.Equal(TimeSpan.FromDays(365), ExportRetention.DefaultPeriods.LongTerm);
|
||||
Assert.Equal(TimeSpan.FromDays(90), ExportRetention.DefaultPeriods.ArchiveDelay);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,711 @@
|
||||
using StellaOps.Orchestrator.Core.Domain.Export;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Export;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for ExportSchedule and related scheduling types.
|
||||
/// </summary>
|
||||
public sealed class ExportScheduleTests
|
||||
{
|
||||
[Fact]
|
||||
public void Create_CreatesScheduleWithDefaults()
|
||||
{
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Daily SBOM Export",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotEqual(Guid.Empty, schedule.ScheduleId);
|
||||
Assert.Equal("tenant-1", schedule.TenantId);
|
||||
Assert.Equal("Daily SBOM Export", schedule.Name);
|
||||
Assert.Equal("export.sbom", schedule.ExportType);
|
||||
Assert.Equal("0 0 * * *", schedule.CronExpression);
|
||||
Assert.Equal("UTC", schedule.Timezone);
|
||||
Assert.True(schedule.Enabled);
|
||||
Assert.Equal("default", schedule.RetentionPolicy);
|
||||
Assert.Null(schedule.ProjectId);
|
||||
Assert.Equal(1, schedule.MaxConcurrent);
|
||||
Assert.True(schedule.SkipIfRunning);
|
||||
Assert.Null(schedule.LastRunAt);
|
||||
Assert.Null(schedule.LastJobId);
|
||||
Assert.Null(schedule.LastRunStatus);
|
||||
Assert.Null(schedule.NextRunAt);
|
||||
Assert.Equal(0, schedule.TotalRuns);
|
||||
Assert.Equal(0, schedule.SuccessfulRuns);
|
||||
Assert.Equal(0, schedule.FailedRuns);
|
||||
Assert.InRange(schedule.CreatedAt, before, after);
|
||||
Assert.Equal(schedule.CreatedAt, schedule.UpdatedAt);
|
||||
Assert.Equal("admin@example.com", schedule.CreatedBy);
|
||||
Assert.Equal("admin@example.com", schedule.UpdatedBy);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_AcceptsOptionalParameters()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Weekly Report",
|
||||
exportType: "export.report",
|
||||
cronExpression: "0 0 * * SUN",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com",
|
||||
description: "Weekly compliance report",
|
||||
timezone: "America/New_York",
|
||||
retentionPolicy: "compliance",
|
||||
projectId: "project-123",
|
||||
maxConcurrent: 3,
|
||||
skipIfRunning: false);
|
||||
|
||||
Assert.Equal("Weekly compliance report", schedule.Description);
|
||||
Assert.Equal("America/New_York", schedule.Timezone);
|
||||
Assert.Equal("compliance", schedule.RetentionPolicy);
|
||||
Assert.Equal("project-123", schedule.ProjectId);
|
||||
Assert.Equal(3, schedule.MaxConcurrent);
|
||||
Assert.False(schedule.SkipIfRunning);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Enable_EnablesSchedule()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var disabled = schedule.Disable();
|
||||
Assert.False(disabled.Enabled);
|
||||
|
||||
var enabled = disabled.Enable();
|
||||
Assert.True(enabled.Enabled);
|
||||
Assert.True(enabled.UpdatedAt > disabled.UpdatedAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Disable_DisablesSchedule()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var disabled = schedule.Disable();
|
||||
|
||||
Assert.False(disabled.Enabled);
|
||||
Assert.True(disabled.UpdatedAt >= schedule.UpdatedAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordSuccess_UpdatesRunStatistics()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var jobId = Guid.NewGuid();
|
||||
var nextRun = DateTimeOffset.UtcNow.AddDays(1);
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
|
||||
var updated = schedule.RecordSuccess(jobId, nextRun);
|
||||
|
||||
Assert.NotNull(updated.LastRunAt);
|
||||
Assert.True(updated.LastRunAt >= before);
|
||||
Assert.Equal(jobId, updated.LastJobId);
|
||||
Assert.Equal("completed", updated.LastRunStatus);
|
||||
Assert.Equal(nextRun, updated.NextRunAt);
|
||||
Assert.Equal(1, updated.TotalRuns);
|
||||
Assert.Equal(1, updated.SuccessfulRuns);
|
||||
Assert.Equal(0, updated.FailedRuns);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordFailure_UpdatesRunStatistics()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var jobId = Guid.NewGuid();
|
||||
var nextRun = DateTimeOffset.UtcNow.AddDays(1);
|
||||
|
||||
var updated = schedule.RecordFailure(jobId, "Database connection failed", nextRun);
|
||||
|
||||
Assert.NotNull(updated.LastRunAt);
|
||||
Assert.Equal(jobId, updated.LastJobId);
|
||||
Assert.Equal("failed: Database connection failed", updated.LastRunStatus);
|
||||
Assert.Equal(nextRun, updated.NextRunAt);
|
||||
Assert.Equal(1, updated.TotalRuns);
|
||||
Assert.Equal(0, updated.SuccessfulRuns);
|
||||
Assert.Equal(1, updated.FailedRuns);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordFailure_UsesUnknownWhenNoReason()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var updated = schedule.RecordFailure(Guid.NewGuid());
|
||||
|
||||
Assert.Equal("failed: unknown", updated.LastRunStatus);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SuccessRate_CalculatesCorrectly()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
Assert.Equal(0, schedule.SuccessRate); // No runs
|
||||
|
||||
var updated = schedule
|
||||
.RecordSuccess(Guid.NewGuid())
|
||||
.RecordSuccess(Guid.NewGuid())
|
||||
.RecordSuccess(Guid.NewGuid())
|
||||
.RecordFailure(Guid.NewGuid());
|
||||
|
||||
Assert.Equal(75.0, updated.SuccessRate);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void WithNextRun_SetsNextRunTime()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var nextRun = DateTimeOffset.UtcNow.AddHours(6);
|
||||
var updated = schedule.WithNextRun(nextRun);
|
||||
|
||||
Assert.Equal(nextRun, updated.NextRunAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void WithCron_UpdatesCronExpression()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var updated = schedule.WithCron("0 */6 * * *", "scheduler@example.com");
|
||||
|
||||
Assert.Equal("0 */6 * * *", updated.CronExpression);
|
||||
Assert.Equal("scheduler@example.com", updated.UpdatedBy);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void WithPayload_UpdatesPayloadTemplate()
|
||||
{
|
||||
var payload = ExportJobPayload.Default("json");
|
||||
var schedule = ExportSchedule.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 0 * * *",
|
||||
payloadTemplate: payload,
|
||||
createdBy: "admin@example.com");
|
||||
|
||||
var newPayload = ExportJobPayload.Default("ndjson") with { ProjectId = "project-2" };
|
||||
|
||||
var updated = schedule.WithPayload(newPayload, "editor@example.com");
|
||||
|
||||
Assert.Equal("project-2", updated.PayloadTemplate.ProjectId);
|
||||
Assert.Equal("ndjson", updated.PayloadTemplate.Format);
|
||||
Assert.Equal("editor@example.com", updated.UpdatedBy);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests for RetentionPruneConfig.
|
||||
/// </summary>
|
||||
public sealed class RetentionPruneConfigTests
|
||||
{
|
||||
[Fact]
|
||||
public void Create_CreatesConfigWithDefaults()
|
||||
{
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
var config = RetentionPruneConfig.Create();
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotEqual(Guid.Empty, config.PruneId);
|
||||
Assert.Null(config.TenantId);
|
||||
Assert.Null(config.ExportType);
|
||||
Assert.True(config.Enabled);
|
||||
Assert.Equal(RetentionPruneConfig.DefaultCronExpression, config.CronExpression);
|
||||
Assert.Equal(RetentionPruneConfig.DefaultBatchSize, config.BatchSize);
|
||||
Assert.True(config.ArchiveBeforeDelete);
|
||||
Assert.Null(config.ArchiveProvider);
|
||||
Assert.False(config.NotifyOnComplete);
|
||||
Assert.Null(config.NotificationChannel);
|
||||
Assert.Null(config.LastPruneAt);
|
||||
Assert.Equal(0, config.LastPruneCount);
|
||||
Assert.Equal(0, config.TotalPruned);
|
||||
Assert.InRange(config.CreatedAt, before, after);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_AcceptsOptionalParameters()
|
||||
{
|
||||
var config = RetentionPruneConfig.Create(
|
||||
tenantId: "tenant-1",
|
||||
exportType: "export.sbom",
|
||||
cronExpression: "0 3 * * *",
|
||||
batchSize: 50);
|
||||
|
||||
Assert.Equal("tenant-1", config.TenantId);
|
||||
Assert.Equal("export.sbom", config.ExportType);
|
||||
Assert.Equal("0 3 * * *", config.CronExpression);
|
||||
Assert.Equal(50, config.BatchSize);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DefaultBatchSize_IsReasonable()
|
||||
{
|
||||
Assert.Equal(100, RetentionPruneConfig.DefaultBatchSize);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DefaultCronExpression_IsDailyAt2AM()
|
||||
{
|
||||
Assert.Equal("0 2 * * *", RetentionPruneConfig.DefaultCronExpression);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordPrune_UpdatesStatistics()
|
||||
{
|
||||
var config = RetentionPruneConfig.Create();
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
|
||||
var updated = config.RecordPrune(25);
|
||||
|
||||
Assert.NotNull(updated.LastPruneAt);
|
||||
Assert.True(updated.LastPruneAt >= before);
|
||||
Assert.Equal(25, updated.LastPruneCount);
|
||||
Assert.Equal(25, updated.TotalPruned);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordPrune_AccumulatesTotal()
|
||||
{
|
||||
var config = RetentionPruneConfig.Create();
|
||||
|
||||
var updated = config
|
||||
.RecordPrune(10)
|
||||
.RecordPrune(15)
|
||||
.RecordPrune(20);
|
||||
|
||||
Assert.Equal(20, updated.LastPruneCount);
|
||||
Assert.Equal(45, updated.TotalPruned);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests for ExportAlertConfig.
|
||||
/// </summary>
|
||||
public sealed class ExportAlertConfigTests
|
||||
{
|
||||
[Fact]
|
||||
public void Create_CreatesConfigWithDefaults()
|
||||
{
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
|
||||
var config = ExportAlertConfig.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "SBOM Export Failures");
|
||||
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotEqual(Guid.Empty, config.AlertConfigId);
|
||||
Assert.Equal("tenant-1", config.TenantId);
|
||||
Assert.Equal("SBOM Export Failures", config.Name);
|
||||
Assert.Null(config.ExportType);
|
||||
Assert.True(config.Enabled);
|
||||
Assert.Equal(3, config.ConsecutiveFailuresThreshold);
|
||||
Assert.Equal(50.0, config.FailureRateThreshold);
|
||||
Assert.Equal(TimeSpan.FromHours(1), config.FailureRateWindow);
|
||||
Assert.Equal(ExportAlertSeverity.Warning, config.Severity);
|
||||
Assert.Equal("email", config.NotificationChannels);
|
||||
Assert.Equal(TimeSpan.FromMinutes(15), config.Cooldown);
|
||||
Assert.Null(config.LastAlertAt);
|
||||
Assert.Equal(0, config.TotalAlerts);
|
||||
Assert.InRange(config.CreatedAt, before, after);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_AcceptsOptionalParameters()
|
||||
{
|
||||
var config = ExportAlertConfig.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Critical Export Failures",
|
||||
exportType: "export.report",
|
||||
consecutiveFailuresThreshold: 5,
|
||||
failureRateThreshold: 25.0,
|
||||
severity: ExportAlertSeverity.Critical);
|
||||
|
||||
Assert.Equal("export.report", config.ExportType);
|
||||
Assert.Equal(5, config.ConsecutiveFailuresThreshold);
|
||||
Assert.Equal(25.0, config.FailureRateThreshold);
|
||||
Assert.Equal(ExportAlertSeverity.Critical, config.Severity);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanAlert_ReturnsTrueWhenNoLastAlert()
|
||||
{
|
||||
var config = ExportAlertConfig.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test Alert");
|
||||
|
||||
Assert.True(config.CanAlert);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanAlert_ReturnsFalseWithinCooldown()
|
||||
{
|
||||
var config = ExportAlertConfig.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test Alert");
|
||||
|
||||
var alerted = config.RecordAlert();
|
||||
|
||||
Assert.False(alerted.CanAlert);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanAlert_ReturnsTrueAfterCooldown()
|
||||
{
|
||||
var config = new ExportAlertConfig(
|
||||
AlertConfigId: Guid.NewGuid(),
|
||||
TenantId: "tenant-1",
|
||||
Name: "Test Alert",
|
||||
ExportType: null,
|
||||
Enabled: true,
|
||||
ConsecutiveFailuresThreshold: 3,
|
||||
FailureRateThreshold: 50.0,
|
||||
FailureRateWindow: TimeSpan.FromHours(1),
|
||||
Severity: ExportAlertSeverity.Warning,
|
||||
NotificationChannels: "email",
|
||||
Cooldown: TimeSpan.FromMinutes(15),
|
||||
LastAlertAt: DateTimeOffset.UtcNow.AddMinutes(-20), // Past cooldown
|
||||
TotalAlerts: 1,
|
||||
CreatedAt: DateTimeOffset.UtcNow.AddDays(-1),
|
||||
UpdatedAt: DateTimeOffset.UtcNow.AddMinutes(-20));
|
||||
|
||||
Assert.True(config.CanAlert);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordAlert_UpdatesTimestampAndCount()
|
||||
{
|
||||
var config = ExportAlertConfig.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test Alert");
|
||||
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
var updated = config.RecordAlert();
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotNull(updated.LastAlertAt);
|
||||
Assert.InRange(updated.LastAlertAt.Value, before, after);
|
||||
Assert.Equal(1, updated.TotalAlerts);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RecordAlert_AccumulatesAlertCount()
|
||||
{
|
||||
var config = ExportAlertConfig.Create(
|
||||
tenantId: "tenant-1",
|
||||
name: "Test Alert");
|
||||
|
||||
// Simulate multiple alerts with cooldown passage
|
||||
var updated = config with
|
||||
{
|
||||
LastAlertAt = DateTimeOffset.UtcNow.AddMinutes(-20),
|
||||
TotalAlerts = 5
|
||||
};
|
||||
|
||||
var alerted = updated.RecordAlert();
|
||||
Assert.Equal(6, alerted.TotalAlerts);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests for ExportAlert.
|
||||
/// </summary>
|
||||
public sealed class ExportAlertTests
|
||||
{
|
||||
[Fact]
|
||||
public void CreateForConsecutiveFailures_CreatesAlert()
|
||||
{
|
||||
var configId = Guid.NewGuid();
|
||||
var failedJobs = new List<Guid> { Guid.NewGuid(), Guid.NewGuid(), Guid.NewGuid() };
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
|
||||
var alert = ExportAlert.CreateForConsecutiveFailures(
|
||||
alertConfigId: configId,
|
||||
tenantId: "tenant-1",
|
||||
exportType: "export.sbom",
|
||||
severity: ExportAlertSeverity.Error,
|
||||
failedJobIds: failedJobs,
|
||||
consecutiveFailures: 3);
|
||||
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotEqual(Guid.Empty, alert.AlertId);
|
||||
Assert.Equal(configId, alert.AlertConfigId);
|
||||
Assert.Equal("tenant-1", alert.TenantId);
|
||||
Assert.Equal("export.sbom", alert.ExportType);
|
||||
Assert.Equal(ExportAlertSeverity.Error, alert.Severity);
|
||||
Assert.Contains("failed 3 consecutive times", alert.Message);
|
||||
Assert.Equal(3, alert.FailedJobIds.Count);
|
||||
Assert.Equal(3, alert.ConsecutiveFailures);
|
||||
Assert.Equal(0, alert.FailureRate);
|
||||
Assert.InRange(alert.TriggeredAt, before, after);
|
||||
Assert.Null(alert.AcknowledgedAt);
|
||||
Assert.Null(alert.AcknowledgedBy);
|
||||
Assert.Null(alert.ResolvedAt);
|
||||
Assert.Null(alert.ResolutionNotes);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CreateForHighFailureRate_CreatesAlert()
|
||||
{
|
||||
var configId = Guid.NewGuid();
|
||||
var failedJobs = new List<Guid> { Guid.NewGuid(), Guid.NewGuid() };
|
||||
|
||||
var alert = ExportAlert.CreateForHighFailureRate(
|
||||
alertConfigId: configId,
|
||||
tenantId: "tenant-1",
|
||||
exportType: "export.report",
|
||||
severity: ExportAlertSeverity.Warning,
|
||||
failureRate: 75.5,
|
||||
recentFailedJobIds: failedJobs);
|
||||
|
||||
Assert.Contains("failure rate is 75.5%", alert.Message);
|
||||
Assert.Equal(0, alert.ConsecutiveFailures);
|
||||
Assert.Equal(75.5, alert.FailureRate);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Acknowledge_SetsAcknowledgementInfo()
|
||||
{
|
||||
var alert = ExportAlert.CreateForConsecutiveFailures(
|
||||
alertConfigId: Guid.NewGuid(),
|
||||
tenantId: "tenant-1",
|
||||
exportType: "export.sbom",
|
||||
severity: ExportAlertSeverity.Error,
|
||||
failedJobIds: [Guid.NewGuid()],
|
||||
consecutiveFailures: 1);
|
||||
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
var acknowledged = alert.Acknowledge("operator@example.com");
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotNull(acknowledged.AcknowledgedAt);
|
||||
Assert.InRange(acknowledged.AcknowledgedAt.Value, before, after);
|
||||
Assert.Equal("operator@example.com", acknowledged.AcknowledgedBy);
|
||||
Assert.True(acknowledged.IsActive); // Still active until resolved
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_SetsResolutionInfo()
|
||||
{
|
||||
var alert = ExportAlert.CreateForConsecutiveFailures(
|
||||
alertConfigId: Guid.NewGuid(),
|
||||
tenantId: "tenant-1",
|
||||
exportType: "export.sbom",
|
||||
severity: ExportAlertSeverity.Error,
|
||||
failedJobIds: [Guid.NewGuid()],
|
||||
consecutiveFailures: 1);
|
||||
|
||||
var before = DateTimeOffset.UtcNow;
|
||||
var resolved = alert.Resolve("Fixed database connection issue");
|
||||
var after = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.NotNull(resolved.ResolvedAt);
|
||||
Assert.InRange(resolved.ResolvedAt.Value, before, after);
|
||||
Assert.Equal("Fixed database connection issue", resolved.ResolutionNotes);
|
||||
Assert.False(resolved.IsActive);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolve_WorksWithoutNotes()
|
||||
{
|
||||
var alert = ExportAlert.CreateForConsecutiveFailures(
|
||||
alertConfigId: Guid.NewGuid(),
|
||||
tenantId: "tenant-1",
|
||||
exportType: "export.sbom",
|
||||
severity: ExportAlertSeverity.Error,
|
||||
failedJobIds: [Guid.NewGuid()],
|
||||
consecutiveFailures: 1);
|
||||
|
||||
var resolved = alert.Resolve();
|
||||
|
||||
Assert.NotNull(resolved.ResolvedAt);
|
||||
Assert.Null(resolved.ResolutionNotes);
|
||||
Assert.False(resolved.IsActive);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsActive_ReturnsTrueWhenNotResolved()
|
||||
{
|
||||
var alert = ExportAlert.CreateForConsecutiveFailures(
|
||||
alertConfigId: Guid.NewGuid(),
|
||||
tenantId: "tenant-1",
|
||||
exportType: "export.sbom",
|
||||
severity: ExportAlertSeverity.Error,
|
||||
failedJobIds: [Guid.NewGuid()],
|
||||
consecutiveFailures: 1);
|
||||
|
||||
Assert.True(alert.IsActive);
|
||||
|
||||
var acknowledged = alert.Acknowledge("user@example.com");
|
||||
Assert.True(acknowledged.IsActive);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsActive_ReturnsFalseWhenResolved()
|
||||
{
|
||||
var alert = ExportAlert.CreateForConsecutiveFailures(
|
||||
alertConfigId: Guid.NewGuid(),
|
||||
tenantId: "tenant-1",
|
||||
exportType: "export.sbom",
|
||||
severity: ExportAlertSeverity.Error,
|
||||
failedJobIds: [Guid.NewGuid()],
|
||||
consecutiveFailures: 1);
|
||||
|
||||
var resolved = alert.Resolve();
|
||||
Assert.False(resolved.IsActive);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests for ExportAlertSeverity.
|
||||
/// </summary>
|
||||
public sealed class ExportAlertSeverityTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData(ExportAlertSeverity.Info, 0)]
|
||||
[InlineData(ExportAlertSeverity.Warning, 1)]
|
||||
[InlineData(ExportAlertSeverity.Error, 2)]
|
||||
[InlineData(ExportAlertSeverity.Critical, 3)]
|
||||
public void AllSeverityValues_HaveCorrectValue(ExportAlertSeverity severity, int expected)
|
||||
{
|
||||
Assert.Equal(expected, (int)severity);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Severity_CanBeCompared()
|
||||
{
|
||||
Assert.True(ExportAlertSeverity.Critical > ExportAlertSeverity.Error);
|
||||
Assert.True(ExportAlertSeverity.Error > ExportAlertSeverity.Warning);
|
||||
Assert.True(ExportAlertSeverity.Warning > ExportAlertSeverity.Info);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests for RetentionPruneResult.
|
||||
/// </summary>
|
||||
public sealed class RetentionPruneResultTests
|
||||
{
|
||||
[Fact]
|
||||
public void TotalProcessed_SumsAllCounts()
|
||||
{
|
||||
var result = new RetentionPruneResult(
|
||||
ArchivedCount: 10,
|
||||
DeletedCount: 20,
|
||||
SkippedCount: 5,
|
||||
Errors: [],
|
||||
Duration: TimeSpan.FromSeconds(30));
|
||||
|
||||
Assert.Equal(35, result.TotalProcessed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void HasErrors_ReturnsTrueWithErrors()
|
||||
{
|
||||
var result = new RetentionPruneResult(
|
||||
ArchivedCount: 10,
|
||||
DeletedCount: 20,
|
||||
SkippedCount: 5,
|
||||
Errors: ["Failed to delete export-123"],
|
||||
Duration: TimeSpan.FromSeconds(30));
|
||||
|
||||
Assert.True(result.HasErrors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void HasErrors_ReturnsFalseWithoutErrors()
|
||||
{
|
||||
var result = new RetentionPruneResult(
|
||||
ArchivedCount: 10,
|
||||
DeletedCount: 20,
|
||||
SkippedCount: 5,
|
||||
Errors: [],
|
||||
Duration: TimeSpan.FromSeconds(30));
|
||||
|
||||
Assert.False(result.HasErrors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Empty_ReturnsZeroResult()
|
||||
{
|
||||
var empty = RetentionPruneResult.Empty;
|
||||
|
||||
Assert.Equal(0, empty.ArchivedCount);
|
||||
Assert.Equal(0, empty.DeletedCount);
|
||||
Assert.Equal(0, empty.SkippedCount);
|
||||
Assert.Empty(empty.Errors);
|
||||
Assert.Equal(TimeSpan.Zero, empty.Duration);
|
||||
Assert.Equal(0, empty.TotalProcessed);
|
||||
Assert.False(empty.HasErrors);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,249 @@
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
using StellaOps.Orchestrator.WebService.Contracts;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.PackRun;
|
||||
|
||||
public sealed class PackRunContractTests
|
||||
{
|
||||
[Fact]
|
||||
public void PackRunResponse_FromDomain_MapsAllFields()
|
||||
{
|
||||
var packRunId = Guid.NewGuid();
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var startedAt = now.AddMinutes(-5);
|
||||
var completedAt = now;
|
||||
|
||||
var packRun = new Core.Domain.PackRun(
|
||||
PackRunId: packRunId,
|
||||
TenantId: "tenant-1",
|
||||
ProjectId: "proj-1",
|
||||
PackId: "pack-alpha",
|
||||
PackVersion: "1.2.3",
|
||||
Status: PackRunStatus.Succeeded,
|
||||
Priority: 5,
|
||||
Attempt: 2,
|
||||
MaxAttempts: 3,
|
||||
Parameters: "{\"key\":\"value\"}",
|
||||
ParametersDigest: "sha256:abc",
|
||||
IdempotencyKey: "idem-1",
|
||||
CorrelationId: "corr-1",
|
||||
LeaseId: null,
|
||||
TaskRunnerId: "runner-1",
|
||||
LeaseUntil: null,
|
||||
CreatedAt: now.AddMinutes(-10),
|
||||
ScheduledAt: now.AddMinutes(-8),
|
||||
LeasedAt: now.AddMinutes(-6),
|
||||
StartedAt: startedAt,
|
||||
CompletedAt: completedAt,
|
||||
NotBefore: null,
|
||||
Reason: "Completed successfully",
|
||||
ExitCode: 0,
|
||||
DurationMs: 300000,
|
||||
CreatedBy: "user@example.com",
|
||||
Metadata: null);
|
||||
|
||||
var response = PackRunResponse.FromDomain(packRun);
|
||||
|
||||
Assert.Equal(packRunId, response.PackRunId);
|
||||
Assert.Equal("pack-alpha", response.PackId);
|
||||
Assert.Equal("1.2.3", response.PackVersion);
|
||||
Assert.Equal("succeeded", response.Status);
|
||||
Assert.Equal(5, response.Priority);
|
||||
Assert.Equal(2, response.Attempt);
|
||||
Assert.Equal(3, response.MaxAttempts);
|
||||
Assert.Equal("corr-1", response.CorrelationId);
|
||||
Assert.Equal("runner-1", response.TaskRunnerId);
|
||||
Assert.Equal(now.AddMinutes(-10), response.CreatedAt);
|
||||
Assert.Equal(now.AddMinutes(-8), response.ScheduledAt);
|
||||
Assert.Equal(startedAt, response.StartedAt);
|
||||
Assert.Equal(completedAt, response.CompletedAt);
|
||||
Assert.Equal("Completed successfully", response.Reason);
|
||||
Assert.Equal(0, response.ExitCode);
|
||||
Assert.Equal(300000, response.DurationMs);
|
||||
Assert.Equal("user@example.com", response.CreatedBy);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(PackRunStatus.Pending, "pending")]
|
||||
[InlineData(PackRunStatus.Scheduled, "scheduled")]
|
||||
[InlineData(PackRunStatus.Leased, "leased")]
|
||||
[InlineData(PackRunStatus.Running, "running")]
|
||||
[InlineData(PackRunStatus.Succeeded, "succeeded")]
|
||||
[InlineData(PackRunStatus.Failed, "failed")]
|
||||
[InlineData(PackRunStatus.Canceled, "canceled")]
|
||||
[InlineData(PackRunStatus.TimedOut, "timedout")]
|
||||
public void PackRunResponse_FromDomain_StatusIsLowercase(PackRunStatus status, string expectedStatusString)
|
||||
{
|
||||
var packRun = CreatePackRunWithStatus(status);
|
||||
var response = PackRunResponse.FromDomain(packRun);
|
||||
|
||||
Assert.Equal(expectedStatusString, response.Status);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void LogEntryResponse_FromDomain_MapsAllFields()
|
||||
{
|
||||
var logId = Guid.NewGuid();
|
||||
var packRunId = Guid.NewGuid();
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var log = new PackRunLog(
|
||||
LogId: logId,
|
||||
TenantId: "tenant-1",
|
||||
PackRunId: packRunId,
|
||||
Sequence: 42,
|
||||
Level: LogLevel.Warn,
|
||||
Source: "stderr",
|
||||
Message: "Warning: something happened",
|
||||
Timestamp: now,
|
||||
Data: "{\"details\":true}");
|
||||
|
||||
var response = LogEntryResponse.FromDomain(log);
|
||||
|
||||
Assert.Equal(logId, response.LogId);
|
||||
Assert.Equal(42, response.Sequence);
|
||||
Assert.Equal("warn", response.Level);
|
||||
Assert.Equal("stderr", response.Source);
|
||||
Assert.Equal("Warning: something happened", response.Message);
|
||||
Assert.Equal(now, response.Timestamp);
|
||||
Assert.Equal("{\"details\":true}", response.Data);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(LogLevel.Trace, "trace")]
|
||||
[InlineData(LogLevel.Debug, "debug")]
|
||||
[InlineData(LogLevel.Info, "info")]
|
||||
[InlineData(LogLevel.Warn, "warn")]
|
||||
[InlineData(LogLevel.Error, "error")]
|
||||
[InlineData(LogLevel.Fatal, "fatal")]
|
||||
public void LogEntryResponse_FromDomain_LevelIsLowercase(LogLevel level, string expectedLevelString)
|
||||
{
|
||||
var log = new PackRunLog(
|
||||
LogId: Guid.NewGuid(),
|
||||
TenantId: "t1",
|
||||
PackRunId: Guid.NewGuid(),
|
||||
Sequence: 0,
|
||||
Level: level,
|
||||
Source: "test",
|
||||
Message: "test",
|
||||
Timestamp: DateTimeOffset.UtcNow,
|
||||
Data: null);
|
||||
|
||||
var response = LogEntryResponse.FromDomain(log);
|
||||
|
||||
Assert.Equal(expectedLevelString, response.Level);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SchedulePackRunRequest_AllFieldsAccessible()
|
||||
{
|
||||
var request = new SchedulePackRunRequest(
|
||||
PackId: "pack-1",
|
||||
PackVersion: "2.0.0",
|
||||
Parameters: "{\"param\":1}",
|
||||
ProjectId: "proj-1",
|
||||
IdempotencyKey: "key-1",
|
||||
CorrelationId: "corr-1",
|
||||
Priority: 10,
|
||||
MaxAttempts: 5,
|
||||
Metadata: "{\"source\":\"api\"}");
|
||||
|
||||
Assert.Equal("pack-1", request.PackId);
|
||||
Assert.Equal("2.0.0", request.PackVersion);
|
||||
Assert.Equal("{\"param\":1}", request.Parameters);
|
||||
Assert.Equal("proj-1", request.ProjectId);
|
||||
Assert.Equal("key-1", request.IdempotencyKey);
|
||||
Assert.Equal("corr-1", request.CorrelationId);
|
||||
Assert.Equal(10, request.Priority);
|
||||
Assert.Equal(5, request.MaxAttempts);
|
||||
Assert.Equal("{\"source\":\"api\"}", request.Metadata);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ClaimPackRunRequest_AllFieldsAccessible()
|
||||
{
|
||||
var request = new ClaimPackRunRequest(
|
||||
TaskRunnerId: "runner-1",
|
||||
PackId: "pack-filter",
|
||||
LeaseSeconds: 600,
|
||||
IdempotencyKey: "claim-key-1");
|
||||
|
||||
Assert.Equal("runner-1", request.TaskRunnerId);
|
||||
Assert.Equal("pack-filter", request.PackId);
|
||||
Assert.Equal(600, request.LeaseSeconds);
|
||||
Assert.Equal("claim-key-1", request.IdempotencyKey);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CompletePackRunRequest_AllFieldsAccessible()
|
||||
{
|
||||
var artifacts = new List<PackRunArtifactRequest>
|
||||
{
|
||||
new("report", "s3://bucket/report.json", "sha256:abc", "application/json", 1024, null),
|
||||
new("log", "s3://bucket/log.txt", "sha256:def", "text/plain", 2048, "{\"lines\":500}")
|
||||
};
|
||||
|
||||
var request = new CompletePackRunRequest(
|
||||
LeaseId: Guid.NewGuid(),
|
||||
Success: true,
|
||||
ExitCode: 0,
|
||||
Reason: "All tests passed",
|
||||
Artifacts: artifacts);
|
||||
|
||||
Assert.True(request.Success);
|
||||
Assert.Equal(0, request.ExitCode);
|
||||
Assert.Equal("All tests passed", request.Reason);
|
||||
Assert.NotNull(request.Artifacts);
|
||||
Assert.Equal(2, request.Artifacts.Count);
|
||||
Assert.Equal("report", request.Artifacts[0].ArtifactType);
|
||||
Assert.Equal("log", request.Artifacts[1].ArtifactType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PackRunErrorResponse_AllFieldsAccessible()
|
||||
{
|
||||
var packRunId = Guid.NewGuid();
|
||||
var error = new PackRunErrorResponse(
|
||||
Code: "lease_expired",
|
||||
Message: "The lease has expired",
|
||||
PackRunId: packRunId,
|
||||
RetryAfterSeconds: 30);
|
||||
|
||||
Assert.Equal("lease_expired", error.Code);
|
||||
Assert.Equal("The lease has expired", error.Message);
|
||||
Assert.Equal(packRunId, error.PackRunId);
|
||||
Assert.Equal(30, error.RetryAfterSeconds);
|
||||
}
|
||||
|
||||
private static Core.Domain.PackRun CreatePackRunWithStatus(PackRunStatus status)
|
||||
{
|
||||
return new Core.Domain.PackRun(
|
||||
PackRunId: Guid.NewGuid(),
|
||||
TenantId: "t1",
|
||||
ProjectId: null,
|
||||
PackId: "pack",
|
||||
PackVersion: "1.0.0",
|
||||
Status: status,
|
||||
Priority: 0,
|
||||
Attempt: 1,
|
||||
MaxAttempts: 3,
|
||||
Parameters: "{}",
|
||||
ParametersDigest: "abc",
|
||||
IdempotencyKey: "key",
|
||||
CorrelationId: null,
|
||||
LeaseId: null,
|
||||
TaskRunnerId: null,
|
||||
LeaseUntil: null,
|
||||
CreatedAt: DateTimeOffset.UtcNow,
|
||||
ScheduledAt: null,
|
||||
LeasedAt: null,
|
||||
StartedAt: null,
|
||||
CompletedAt: null,
|
||||
NotBefore: null,
|
||||
Reason: null,
|
||||
ExitCode: null,
|
||||
DurationMs: null,
|
||||
CreatedBy: "system",
|
||||
Metadata: null);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,217 @@
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.PackRun;
|
||||
|
||||
public sealed class PackRunLogTests
|
||||
{
|
||||
private const string TestTenantId = "tenant-test";
|
||||
private readonly Guid _packRunId = Guid.NewGuid();
|
||||
|
||||
[Fact]
|
||||
public void Create_InitializesAllFields()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var log = PackRunLog.Create(
|
||||
packRunId: _packRunId,
|
||||
tenantId: TestTenantId,
|
||||
sequence: 5,
|
||||
level: LogLevel.Info,
|
||||
source: "stdout",
|
||||
message: "Test message",
|
||||
data: "{\"key\":\"value\"}",
|
||||
timestamp: now);
|
||||
|
||||
Assert.NotEqual(Guid.Empty, log.LogId);
|
||||
Assert.Equal(TestTenantId, log.TenantId);
|
||||
Assert.Equal(_packRunId, log.PackRunId);
|
||||
Assert.Equal(5, log.Sequence);
|
||||
Assert.Equal(LogLevel.Info, log.Level);
|
||||
Assert.Equal("stdout", log.Source);
|
||||
Assert.Equal("Test message", log.Message);
|
||||
Assert.Equal(now, log.Timestamp);
|
||||
Assert.Equal("{\"key\":\"value\"}", log.Data);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_WithNullTimestamp_UsesUtcNow()
|
||||
{
|
||||
var beforeCreate = DateTimeOffset.UtcNow;
|
||||
|
||||
var log = PackRunLog.Create(
|
||||
packRunId: _packRunId,
|
||||
tenantId: TestTenantId,
|
||||
sequence: 0,
|
||||
level: LogLevel.Debug,
|
||||
source: "test",
|
||||
message: "Test");
|
||||
|
||||
var afterCreate = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.True(log.Timestamp >= beforeCreate);
|
||||
Assert.True(log.Timestamp <= afterCreate);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Stdout_CreatesInfoLevelStdoutLog()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var log = PackRunLog.Stdout(_packRunId, TestTenantId, 10, "Hello stdout", now);
|
||||
|
||||
Assert.Equal(LogLevel.Info, log.Level);
|
||||
Assert.Equal("stdout", log.Source);
|
||||
Assert.Equal("Hello stdout", log.Message);
|
||||
Assert.Equal(10, log.Sequence);
|
||||
Assert.Null(log.Data);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Stderr_CreatesWarnLevelStderrLog()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var log = PackRunLog.Stderr(_packRunId, TestTenantId, 20, "Warning message", now);
|
||||
|
||||
Assert.Equal(LogLevel.Warn, log.Level);
|
||||
Assert.Equal("stderr", log.Source);
|
||||
Assert.Equal("Warning message", log.Message);
|
||||
Assert.Equal(20, log.Sequence);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void System_CreatesSystemSourceLog()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var log = PackRunLog.System(_packRunId, TestTenantId, 30, LogLevel.Error, "System error", "{\"code\":500}", now);
|
||||
|
||||
Assert.Equal(LogLevel.Error, log.Level);
|
||||
Assert.Equal("system", log.Source);
|
||||
Assert.Equal("System error", log.Message);
|
||||
Assert.Equal("{\"code\":500}", log.Data);
|
||||
Assert.Equal(30, log.Sequence);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(LogLevel.Trace, 0)]
|
||||
[InlineData(LogLevel.Debug, 1)]
|
||||
[InlineData(LogLevel.Info, 2)]
|
||||
[InlineData(LogLevel.Warn, 3)]
|
||||
[InlineData(LogLevel.Error, 4)]
|
||||
[InlineData(LogLevel.Fatal, 5)]
|
||||
public void LogLevel_HasCorrectOrdinalValues(LogLevel level, int expectedValue)
|
||||
{
|
||||
Assert.Equal(expectedValue, (int)level);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class PackRunLogBatchTests
|
||||
{
|
||||
private const string TestTenantId = "tenant-test";
|
||||
private readonly Guid _packRunId = Guid.NewGuid();
|
||||
|
||||
[Fact]
|
||||
public void FromLogs_EmptyList_ReturnsEmptyBatch()
|
||||
{
|
||||
var batch = PackRunLogBatch.FromLogs(_packRunId, TestTenantId, []);
|
||||
|
||||
Assert.Equal(_packRunId, batch.PackRunId);
|
||||
Assert.Equal(TestTenantId, batch.TenantId);
|
||||
Assert.Equal(0, batch.StartSequence);
|
||||
Assert.Empty(batch.Logs);
|
||||
Assert.Equal(0, batch.NextSequence);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FromLogs_WithLogs_SetsCorrectStartSequence()
|
||||
{
|
||||
var logs = new List<PackRunLog>
|
||||
{
|
||||
PackRunLog.Create(_packRunId, TestTenantId, 5, LogLevel.Info, "src", "msg1"),
|
||||
PackRunLog.Create(_packRunId, TestTenantId, 6, LogLevel.Info, "src", "msg2"),
|
||||
PackRunLog.Create(_packRunId, TestTenantId, 7, LogLevel.Info, "src", "msg3")
|
||||
};
|
||||
|
||||
var batch = PackRunLogBatch.FromLogs(_packRunId, TestTenantId, logs);
|
||||
|
||||
Assert.Equal(5, batch.StartSequence);
|
||||
Assert.Equal(3, batch.Logs.Count);
|
||||
Assert.Equal(8, batch.NextSequence); // StartSequence + Count
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NextSequence_CalculatesCorrectly()
|
||||
{
|
||||
var batch = new PackRunLogBatch(
|
||||
PackRunId: _packRunId,
|
||||
TenantId: TestTenantId,
|
||||
StartSequence: 100,
|
||||
Logs:
|
||||
[
|
||||
PackRunLog.Create(_packRunId, TestTenantId, 100, LogLevel.Info, "src", "msg1"),
|
||||
PackRunLog.Create(_packRunId, TestTenantId, 101, LogLevel.Info, "src", "msg2")
|
||||
]);
|
||||
|
||||
Assert.Equal(102, batch.NextSequence);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class PackRunLogCursorTests
|
||||
{
|
||||
private readonly Guid _packRunId = Guid.NewGuid();
|
||||
|
||||
[Fact]
|
||||
public void Start_CreatesInitialCursor()
|
||||
{
|
||||
var cursor = PackRunLogCursor.Start(_packRunId);
|
||||
|
||||
Assert.Equal(_packRunId, cursor.PackRunId);
|
||||
Assert.Equal(-1, cursor.LastSequence);
|
||||
Assert.False(cursor.IsComplete);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resume_CreatesCursorFromSequence()
|
||||
{
|
||||
var cursor = PackRunLogCursor.Resume(_packRunId, 50);
|
||||
|
||||
Assert.Equal(_packRunId, cursor.PackRunId);
|
||||
Assert.Equal(50, cursor.LastSequence);
|
||||
Assert.False(cursor.IsComplete);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Complete_MarksCursorAsComplete()
|
||||
{
|
||||
var cursor = PackRunLogCursor.Start(_packRunId);
|
||||
var completed = cursor.Complete();
|
||||
|
||||
Assert.True(completed.IsComplete);
|
||||
Assert.Equal(cursor.PackRunId, completed.PackRunId);
|
||||
Assert.Equal(cursor.LastSequence, completed.LastSequence);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Advance_UpdatesLastSequence()
|
||||
{
|
||||
var cursor = PackRunLogCursor.Start(_packRunId);
|
||||
var advanced = cursor.Advance(100);
|
||||
|
||||
Assert.Equal(100, advanced.LastSequence);
|
||||
Assert.False(advanced.IsComplete);
|
||||
Assert.Equal(cursor.PackRunId, advanced.PackRunId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Advance_ThenComplete_WorksCorrectly()
|
||||
{
|
||||
var cursor = PackRunLogCursor.Start(_packRunId)
|
||||
.Advance(50)
|
||||
.Advance(100)
|
||||
.Complete();
|
||||
|
||||
Assert.Equal(100, cursor.LastSequence);
|
||||
Assert.True(cursor.IsComplete);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,202 @@
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.PackRun;
|
||||
|
||||
public sealed class PackRunTests
|
||||
{
|
||||
private const string TestTenantId = "tenant-test";
|
||||
private const string TestPackId = "pack-alpha";
|
||||
private const string TestPackVersion = "1.0.0";
|
||||
private const string TestParameters = "{\"key\":\"value\"}";
|
||||
private const string TestParametersDigest = "abc123def456";
|
||||
private const string TestIdempotencyKey = "idem-key-001";
|
||||
private const string TestCreatedBy = "system";
|
||||
|
||||
[Fact]
|
||||
public void Create_InitializesWithCorrectDefaults()
|
||||
{
|
||||
var packRunId = Guid.NewGuid();
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var packRun = Core.Domain.PackRun.Create(
|
||||
packRunId: packRunId,
|
||||
tenantId: TestTenantId,
|
||||
projectId: "proj-1",
|
||||
packId: TestPackId,
|
||||
packVersion: TestPackVersion,
|
||||
parameters: TestParameters,
|
||||
parametersDigest: TestParametersDigest,
|
||||
idempotencyKey: TestIdempotencyKey,
|
||||
correlationId: "corr-123",
|
||||
createdBy: TestCreatedBy,
|
||||
priority: 5,
|
||||
maxAttempts: 3,
|
||||
metadata: "{\"source\":\"test\"}",
|
||||
createdAt: now);
|
||||
|
||||
Assert.Equal(packRunId, packRun.PackRunId);
|
||||
Assert.Equal(TestTenantId, packRun.TenantId);
|
||||
Assert.Equal("proj-1", packRun.ProjectId);
|
||||
Assert.Equal(TestPackId, packRun.PackId);
|
||||
Assert.Equal(TestPackVersion, packRun.PackVersion);
|
||||
Assert.Equal(PackRunStatus.Pending, packRun.Status);
|
||||
Assert.Equal(5, packRun.Priority);
|
||||
Assert.Equal(1, packRun.Attempt);
|
||||
Assert.Equal(3, packRun.MaxAttempts);
|
||||
Assert.Equal(TestParameters, packRun.Parameters);
|
||||
Assert.Equal(TestParametersDigest, packRun.ParametersDigest);
|
||||
Assert.Equal(TestIdempotencyKey, packRun.IdempotencyKey);
|
||||
Assert.Equal("corr-123", packRun.CorrelationId);
|
||||
Assert.Null(packRun.LeaseId);
|
||||
Assert.Null(packRun.TaskRunnerId);
|
||||
Assert.Null(packRun.LeaseUntil);
|
||||
Assert.Equal(now, packRun.CreatedAt);
|
||||
Assert.Null(packRun.ScheduledAt);
|
||||
Assert.Null(packRun.LeasedAt);
|
||||
Assert.Null(packRun.StartedAt);
|
||||
Assert.Null(packRun.CompletedAt);
|
||||
Assert.Null(packRun.NotBefore);
|
||||
Assert.Null(packRun.Reason);
|
||||
Assert.Null(packRun.ExitCode);
|
||||
Assert.Null(packRun.DurationMs);
|
||||
Assert.Equal(TestCreatedBy, packRun.CreatedBy);
|
||||
Assert.Equal("{\"source\":\"test\"}", packRun.Metadata);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_WithDefaultPriorityAndMaxAttempts()
|
||||
{
|
||||
var packRun = Core.Domain.PackRun.Create(
|
||||
packRunId: Guid.NewGuid(),
|
||||
tenantId: TestTenantId,
|
||||
projectId: null,
|
||||
packId: TestPackId,
|
||||
packVersion: TestPackVersion,
|
||||
parameters: TestParameters,
|
||||
parametersDigest: TestParametersDigest,
|
||||
idempotencyKey: TestIdempotencyKey,
|
||||
correlationId: null,
|
||||
createdBy: TestCreatedBy);
|
||||
|
||||
Assert.Equal(0, packRun.Priority);
|
||||
Assert.Equal(3, packRun.MaxAttempts);
|
||||
Assert.Null(packRun.ProjectId);
|
||||
Assert.Null(packRun.CorrelationId);
|
||||
Assert.Null(packRun.Metadata);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(PackRunStatus.Succeeded, true)]
|
||||
[InlineData(PackRunStatus.Failed, true)]
|
||||
[InlineData(PackRunStatus.Canceled, true)]
|
||||
[InlineData(PackRunStatus.TimedOut, true)]
|
||||
[InlineData(PackRunStatus.Pending, false)]
|
||||
[InlineData(PackRunStatus.Scheduled, false)]
|
||||
[InlineData(PackRunStatus.Leased, false)]
|
||||
[InlineData(PackRunStatus.Running, false)]
|
||||
public void IsTerminal_ReturnsCorrectValue(PackRunStatus status, bool expectedIsTerminal)
|
||||
{
|
||||
var packRun = CreatePackRunWithStatus(status);
|
||||
Assert.Equal(expectedIsTerminal, packRun.IsTerminal);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(PackRunStatus.Failed, 1, 3, true)] // First attempt, can retry
|
||||
[InlineData(PackRunStatus.Failed, 2, 3, true)] // Second attempt, can retry
|
||||
[InlineData(PackRunStatus.Failed, 3, 3, false)] // Third attempt, max reached
|
||||
[InlineData(PackRunStatus.Succeeded, 1, 3, false)] // Succeeded, no retry
|
||||
[InlineData(PackRunStatus.Canceled, 1, 3, false)] // Canceled, no retry
|
||||
[InlineData(PackRunStatus.Running, 1, 3, false)] // Not failed, no retry
|
||||
public void CanRetry_ReturnsCorrectValue(PackRunStatus status, int attempt, int maxAttempts, bool expectedCanRetry)
|
||||
{
|
||||
var packRun = CreatePackRunWithStatusAndAttempts(status, attempt, maxAttempts);
|
||||
Assert.Equal(expectedCanRetry, packRun.CanRetry);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Create_WithNullCreatedAt_UsesUtcNow()
|
||||
{
|
||||
var beforeCreate = DateTimeOffset.UtcNow;
|
||||
|
||||
var packRun = Core.Domain.PackRun.Create(
|
||||
packRunId: Guid.NewGuid(),
|
||||
tenantId: TestTenantId,
|
||||
projectId: null,
|
||||
packId: TestPackId,
|
||||
packVersion: TestPackVersion,
|
||||
parameters: TestParameters,
|
||||
parametersDigest: TestParametersDigest,
|
||||
idempotencyKey: TestIdempotencyKey,
|
||||
correlationId: null,
|
||||
createdBy: TestCreatedBy);
|
||||
|
||||
var afterCreate = DateTimeOffset.UtcNow;
|
||||
|
||||
Assert.True(packRun.CreatedAt >= beforeCreate);
|
||||
Assert.True(packRun.CreatedAt <= afterCreate);
|
||||
}
|
||||
|
||||
private static Core.Domain.PackRun CreatePackRunWithStatus(PackRunStatus status)
|
||||
{
|
||||
return new Core.Domain.PackRun(
|
||||
PackRunId: Guid.NewGuid(),
|
||||
TenantId: TestTenantId,
|
||||
ProjectId: null,
|
||||
PackId: TestPackId,
|
||||
PackVersion: TestPackVersion,
|
||||
Status: status,
|
||||
Priority: 0,
|
||||
Attempt: 1,
|
||||
MaxAttempts: 3,
|
||||
Parameters: TestParameters,
|
||||
ParametersDigest: TestParametersDigest,
|
||||
IdempotencyKey: TestIdempotencyKey,
|
||||
CorrelationId: null,
|
||||
LeaseId: null,
|
||||
TaskRunnerId: null,
|
||||
LeaseUntil: null,
|
||||
CreatedAt: DateTimeOffset.UtcNow,
|
||||
ScheduledAt: null,
|
||||
LeasedAt: null,
|
||||
StartedAt: null,
|
||||
CompletedAt: null,
|
||||
NotBefore: null,
|
||||
Reason: null,
|
||||
ExitCode: null,
|
||||
DurationMs: null,
|
||||
CreatedBy: TestCreatedBy,
|
||||
Metadata: null);
|
||||
}
|
||||
|
||||
private static Core.Domain.PackRun CreatePackRunWithStatusAndAttempts(PackRunStatus status, int attempt, int maxAttempts)
|
||||
{
|
||||
return new Core.Domain.PackRun(
|
||||
PackRunId: Guid.NewGuid(),
|
||||
TenantId: TestTenantId,
|
||||
ProjectId: null,
|
||||
PackId: TestPackId,
|
||||
PackVersion: TestPackVersion,
|
||||
Status: status,
|
||||
Priority: 0,
|
||||
Attempt: attempt,
|
||||
MaxAttempts: maxAttempts,
|
||||
Parameters: TestParameters,
|
||||
ParametersDigest: TestParametersDigest,
|
||||
IdempotencyKey: TestIdempotencyKey,
|
||||
CorrelationId: null,
|
||||
LeaseId: null,
|
||||
TaskRunnerId: null,
|
||||
LeaseUntil: null,
|
||||
CreatedAt: DateTimeOffset.UtcNow,
|
||||
ScheduledAt: null,
|
||||
LeasedAt: null,
|
||||
StartedAt: null,
|
||||
CompletedAt: null,
|
||||
NotBefore: null,
|
||||
Reason: null,
|
||||
ExitCode: null,
|
||||
DurationMs: null,
|
||||
CreatedBy: TestCreatedBy,
|
||||
Metadata: null);
|
||||
}
|
||||
}
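
The theory cases above pin down IsTerminal and CanRetry: a run is terminal once it reaches Succeeded, Failed, Canceled, or TimedOut, and only a Failed run with remaining attempts is retryable. A minimal sketch of how a caller might branch on those flags; rescheduleAsync is a hypothetical callback and is not part of this change:

    static Task HandleFinishedRunAsync(Core.Domain.PackRun run, Func<Core.Domain.PackRun, Task> rescheduleAsync)
    {
        if (!run.IsTerminal)
        {
            return Task.CompletedTask;      // still Pending/Scheduled/Leased/Running
        }

        return run.CanRetry
            ? rescheduleAsync(run)          // Failed with Attempt < MaxAttempts
            : Task.CompletedTask;           // Succeeded, Canceled, TimedOut, or attempts exhausted
    }
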
@@ -122,9 +122,14 @@
    <ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/>
    <ProjectReference Include="..\StellaOps.Orchestrator.WebService\StellaOps.Orchestrator.WebService.csproj"/>
  </ItemGroup>
@@ -0,0 +1,338 @@
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.WebService.Contracts;
|
||||
|
||||
// ========== Scheduling Requests/Responses ==========
|
||||
|
||||
/// <summary>
|
||||
/// Request to schedule a new pack run.
|
||||
/// </summary>
|
||||
public sealed record SchedulePackRunRequest(
|
||||
/// <summary>Authority pack ID to execute.</summary>
|
||||
string PackId,
|
||||
|
||||
/// <summary>Pack version (e.g., "1.2.3", "latest").</summary>
|
||||
string PackVersion,
|
||||
|
||||
/// <summary>Pack input parameters JSON.</summary>
|
||||
string? Parameters,
|
||||
|
||||
/// <summary>Optional project scope.</summary>
|
||||
string? ProjectId,
|
||||
|
||||
/// <summary>Idempotency key for deduplication.</summary>
|
||||
string? IdempotencyKey,
|
||||
|
||||
/// <summary>Correlation ID for tracing.</summary>
|
||||
string? CorrelationId,
|
||||
|
||||
/// <summary>Priority (higher = more urgent).</summary>
|
||||
int? Priority,
|
||||
|
||||
/// <summary>Maximum retry attempts.</summary>
|
||||
int? MaxAttempts,
|
||||
|
||||
/// <summary>Optional metadata JSON.</summary>
|
||||
string? Metadata);
|
||||
|
||||
/// <summary>
|
||||
/// Response for a scheduled pack run.
|
||||
/// </summary>
|
||||
public sealed record SchedulePackRunResponse(
|
||||
Guid PackRunId,
|
||||
string PackId,
|
||||
string PackVersion,
|
||||
string Status,
|
||||
string IdempotencyKey,
|
||||
DateTimeOffset CreatedAt,
|
||||
bool WasAlreadyScheduled);
|
||||
|
||||
/// <summary>
|
||||
/// Response representing a pack run.
|
||||
/// </summary>
|
||||
public sealed record PackRunResponse(
|
||||
Guid PackRunId,
|
||||
string PackId,
|
||||
string PackVersion,
|
||||
string Status,
|
||||
int Priority,
|
||||
int Attempt,
|
||||
int MaxAttempts,
|
||||
string? CorrelationId,
|
||||
string? TaskRunnerId,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset? ScheduledAt,
|
||||
DateTimeOffset? StartedAt,
|
||||
DateTimeOffset? CompletedAt,
|
||||
string? Reason,
|
||||
int? ExitCode,
|
||||
long? DurationMs,
|
||||
string CreatedBy)
|
||||
{
|
||||
public static PackRunResponse FromDomain(PackRun packRun) => new(
|
||||
packRun.PackRunId,
|
||||
packRun.PackId,
|
||||
packRun.PackVersion,
|
||||
packRun.Status.ToString().ToLowerInvariant(),
|
||||
packRun.Priority,
|
||||
packRun.Attempt,
|
||||
packRun.MaxAttempts,
|
||||
packRun.CorrelationId,
|
||||
packRun.TaskRunnerId,
|
||||
packRun.CreatedAt,
|
||||
packRun.ScheduledAt,
|
||||
packRun.StartedAt,
|
||||
packRun.CompletedAt,
|
||||
packRun.Reason,
|
||||
packRun.ExitCode,
|
||||
packRun.DurationMs,
|
||||
packRun.CreatedBy);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response containing a list of pack runs.
|
||||
/// </summary>
|
||||
public sealed record PackRunListResponse(
|
||||
IReadOnlyList<PackRunResponse> PackRuns,
|
||||
int TotalCount,
|
||||
string? NextCursor);
|
||||
|
||||
// ========== Task Runner (Worker) Requests/Responses ==========
|
||||
|
||||
/// <summary>
|
||||
/// Request to claim a pack run for execution.
|
||||
/// </summary>
|
||||
public sealed record ClaimPackRunRequest(
|
||||
/// <summary>Task runner ID claiming the pack run.</summary>
|
||||
string TaskRunnerId,
|
||||
|
||||
/// <summary>Optional pack ID filter (only claim runs for this pack).</summary>
|
||||
string? PackId,
|
||||
|
||||
/// <summary>Requested lease duration in seconds.</summary>
|
||||
int? LeaseSeconds,
|
||||
|
||||
/// <summary>Idempotency key for claim deduplication.</summary>
|
||||
string? IdempotencyKey);
|
||||
|
||||
/// <summary>
|
||||
/// Response for a claimed pack run.
|
||||
/// </summary>
|
||||
public sealed record ClaimPackRunResponse(
|
||||
Guid PackRunId,
|
||||
Guid LeaseId,
|
||||
string PackId,
|
||||
string PackVersion,
|
||||
string Parameters,
|
||||
string ParametersDigest,
|
||||
int Attempt,
|
||||
int MaxAttempts,
|
||||
DateTimeOffset LeaseUntil,
|
||||
string IdempotencyKey,
|
||||
string? CorrelationId,
|
||||
string? ProjectId,
|
||||
string? Metadata);
|
||||
|
||||
/// <summary>
|
||||
/// Request to extend a pack run lease (heartbeat).
|
||||
/// </summary>
|
||||
public sealed record PackRunHeartbeatRequest(
|
||||
/// <summary>Current lease ID.</summary>
|
||||
Guid LeaseId,
|
||||
|
||||
/// <summary>Lease extension in seconds.</summary>
|
||||
int? ExtendSeconds);
|
||||
|
||||
/// <summary>
|
||||
/// Response for a pack run heartbeat.
|
||||
/// </summary>
|
||||
public sealed record PackRunHeartbeatResponse(
|
||||
Guid PackRunId,
|
||||
Guid LeaseId,
|
||||
DateTimeOffset LeaseUntil,
|
||||
bool Acknowledged);
|
||||
|
||||
/// <summary>
|
||||
/// Request to report pack run start.
|
||||
/// </summary>
|
||||
public sealed record PackRunStartRequest(
|
||||
/// <summary>Current lease ID.</summary>
|
||||
Guid LeaseId);
|
||||
|
||||
/// <summary>
|
||||
/// Response for pack run start.
|
||||
/// </summary>
|
||||
public sealed record PackRunStartResponse(
|
||||
Guid PackRunId,
|
||||
bool Acknowledged,
|
||||
DateTimeOffset StartedAt);
|
||||
|
||||
/// <summary>
|
||||
/// Request to complete a pack run.
|
||||
/// </summary>
|
||||
public sealed record CompletePackRunRequest(
|
||||
/// <summary>Current lease ID.</summary>
|
||||
Guid LeaseId,
|
||||
|
||||
/// <summary>Whether the pack run succeeded (exit code 0).</summary>
|
||||
bool Success,
|
||||
|
||||
/// <summary>Exit code from pack execution.</summary>
|
||||
int ExitCode,
|
||||
|
||||
/// <summary>Reason for failure/success.</summary>
|
||||
string? Reason,
|
||||
|
||||
/// <summary>Artifacts produced by the pack run.</summary>
|
||||
IReadOnlyList<PackRunArtifactRequest>? Artifacts);
|
||||
|
||||
/// <summary>
|
||||
/// Artifact metadata for pack run completion.
|
||||
/// </summary>
|
||||
public sealed record PackRunArtifactRequest(
|
||||
/// <summary>Artifact type (e.g., "report", "log", "manifest").</summary>
|
||||
string ArtifactType,
|
||||
|
||||
/// <summary>Storage URI.</summary>
|
||||
string Uri,
|
||||
|
||||
/// <summary>Content digest (SHA-256).</summary>
|
||||
string Digest,
|
||||
|
||||
/// <summary>MIME type.</summary>
|
||||
string? MimeType,
|
||||
|
||||
/// <summary>Size in bytes.</summary>
|
||||
long? SizeBytes,
|
||||
|
||||
/// <summary>Optional metadata JSON.</summary>
|
||||
string? Metadata);
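
A hedged example of how a runner might assemble the completion payload for a failed run that produced one log artifact; the URI and digest values are placeholders, and leaseId is the lease returned by the claim endpoint:

    static CompletePackRunRequest BuildFailureCompletion(Guid leaseId) => new(
        LeaseId: leaseId,
        Success: false,
        ExitCode: 2,
        Reason: "step 'verify' returned a non-zero exit code",
        Artifacts: new[]
        {
            new PackRunArtifactRequest(
                ArtifactType: "log",
                Uri: "s3://example-bucket/pack-runs/run-123/stdout.log",   // placeholder URI
                Digest: "<sha256-hex-of-uploaded-file>",                    // placeholder digest
                MimeType: "text/plain",
                SizeBytes: 4096,
                Metadata: null)
        });
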
|
||||
|
||||
/// <summary>
|
||||
/// Response for pack run completion.
|
||||
/// </summary>
|
||||
public sealed record CompletePackRunResponse(
|
||||
Guid PackRunId,
|
||||
string Status,
|
||||
DateTimeOffset CompletedAt,
|
||||
IReadOnlyList<Guid> ArtifactIds,
|
||||
long DurationMs);
|
||||
|
||||
// ========== Log Requests/Responses ==========
|
||||
|
||||
/// <summary>
|
||||
/// Request to append logs to a pack run.
|
||||
/// </summary>
|
||||
public sealed record AppendLogsRequest(
|
||||
/// <summary>Current lease ID.</summary>
|
||||
Guid LeaseId,
|
||||
|
||||
/// <summary>Log entries to append.</summary>
|
||||
IReadOnlyList<LogEntryRequest> Logs);
|
||||
|
||||
/// <summary>
|
||||
/// A single log entry to append.
|
||||
/// </summary>
|
||||
public sealed record LogEntryRequest(
|
||||
/// <summary>Log level (trace, debug, info, warn, error, fatal).</summary>
|
||||
string Level,
|
||||
|
||||
/// <summary>Log source (stdout, stderr, system, pack).</summary>
|
||||
string Source,
|
||||
|
||||
/// <summary>Log message.</summary>
|
||||
string Message,
|
||||
|
||||
/// <summary>Timestamp (defaults to server time if not provided).</summary>
|
||||
DateTimeOffset? Timestamp,
|
||||
|
||||
/// <summary>Optional structured data JSON.</summary>
|
||||
string? Data);
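
A hedged example of a small log batch a runner might post after a pack step; timestamps are left null so the server stamps them, and leaseId is the lease obtained when the run was claimed:

    var batch = new AppendLogsRequest(
        LeaseId: leaseId,
        Logs: new[]
        {
            new LogEntryRequest(Level: "info",  Source: "stdout", Message: "step 'fetch' completed", Timestamp: null, Data: null),
            new LogEntryRequest(Level: "error", Source: "stderr", Message: "step 'verify' failed",   Timestamp: null, Data: "{\"step\":\"verify\"}")
        });
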
|
||||
|
||||
/// <summary>
|
||||
/// Response for appending logs.
|
||||
/// </summary>
|
||||
public sealed record AppendLogsResponse(
|
||||
Guid PackRunId,
|
||||
int LogsAppended,
|
||||
long LatestSequence);
|
||||
|
||||
/// <summary>
|
||||
/// Response for a log entry.
|
||||
/// </summary>
|
||||
public sealed record LogEntryResponse(
|
||||
Guid LogId,
|
||||
long Sequence,
|
||||
string Level,
|
||||
string Source,
|
||||
string Message,
|
||||
DateTimeOffset Timestamp,
|
||||
string? Data)
|
||||
{
|
||||
public static LogEntryResponse FromDomain(PackRunLog log) => new(
|
||||
log.LogId,
|
||||
log.Sequence,
|
||||
log.Level.ToString().ToLowerInvariant(),
|
||||
log.Source,
|
||||
log.Message,
|
||||
log.Timestamp,
|
||||
log.Data);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response containing a batch of logs.
|
||||
/// </summary>
|
||||
public sealed record LogBatchResponse(
|
||||
Guid PackRunId,
|
||||
IReadOnlyList<LogEntryResponse> Logs,
|
||||
long StartSequence,
|
||||
long? NextSequence,
|
||||
bool HasMore);
|
||||
|
||||
// ========== Cancel/Retry Requests ==========
|
||||
|
||||
/// <summary>
|
||||
/// Request to cancel a pack run.
|
||||
/// </summary>
|
||||
public sealed record CancelPackRunRequest(
|
||||
/// <summary>Reason for cancellation.</summary>
|
||||
string Reason);
|
||||
|
||||
/// <summary>
|
||||
/// Response for cancel operation.
|
||||
/// </summary>
|
||||
public sealed record CancelPackRunResponse(
|
||||
Guid PackRunId,
|
||||
string Status,
|
||||
string Reason,
|
||||
DateTimeOffset CanceledAt);
|
||||
|
||||
/// <summary>
|
||||
/// Request to retry a failed pack run.
|
||||
/// </summary>
|
||||
public sealed record RetryPackRunRequest(
|
||||
/// <summary>Override parameters for retry (optional).</summary>
|
||||
string? Parameters,
|
||||
|
||||
/// <summary>New idempotency key for the retry.</summary>
|
||||
string? IdempotencyKey);
|
||||
|
||||
/// <summary>
|
||||
/// Response for retry operation.
|
||||
/// </summary>
|
||||
public sealed record RetryPackRunResponse(
|
||||
Guid OriginalPackRunId,
|
||||
Guid NewPackRunId,
|
||||
string Status,
|
||||
DateTimeOffset CreatedAt);
|
||||
|
||||
// ========== Error Response ==========
|
||||
|
||||
/// <summary>
|
||||
/// Error response for pack run operations.
|
||||
/// </summary>
|
||||
public sealed record PackRunErrorResponse(
|
||||
string Code,
|
||||
string Message,
|
||||
Guid? PackRunId,
|
||||
int? RetryAfterSeconds);
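
Taken together, the worker-facing records above describe a claim → start → append-logs → complete lifecycle against the pack-run endpoints mapped later in this change. A hedged sketch of that loop over HttpClient (System.Net.Http.Json); heartbeats, error handling, and tenant/auth propagation are intentionally omitted:

    async Task RunOnePackRunAsync(HttpClient http, string runnerId, CancellationToken ct)
    {
        // Claim the next available run; 204 means there is nothing to do right now.
        using var claim = await http.PostAsJsonAsync(
            "/api/v1/orchestrator/pack-runs/claim",
            new ClaimPackRunRequest(runnerId, PackId: null, LeaseSeconds: 300, IdempotencyKey: null), ct);
        if (claim.StatusCode == System.Net.HttpStatusCode.NoContent) return;

        var job = await claim.Content.ReadFromJsonAsync<ClaimPackRunResponse>(cancellationToken: ct);
        var baseUrl = $"/api/v1/orchestrator/pack-runs/{job!.PackRunId}";

        await http.PostAsJsonAsync($"{baseUrl}/start", new PackRunStartRequest(job.LeaseId), ct);

        // ... execute the pack here, posting AppendLogsRequest batches to {baseUrl}/logs ...

        await http.PostAsJsonAsync($"{baseUrl}/complete",
            new CompletePackRunRequest(job.LeaseId, Success: true, ExitCode: 0, Reason: null, Artifacts: null), ct);
    }
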
|
||||
@@ -0,0 +1,381 @@
|
||||
using Microsoft.AspNetCore.Http.HttpResults;
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
using StellaOps.Orchestrator.Core.Domain.Export;
|
||||
using StellaOps.Orchestrator.Core.Services;
|
||||
using StellaOps.Orchestrator.WebService.Contracts;
|
||||
|
||||
namespace StellaOps.Orchestrator.WebService.Endpoints;
|
||||
|
||||
/// <summary>
|
||||
/// REST API endpoints for export job management.
|
||||
/// </summary>
|
||||
public static class ExportJobEndpoints
|
||||
{
|
||||
/// <summary>
|
||||
/// Maps export job endpoints to the route builder.
|
||||
/// </summary>
|
||||
public static void MapExportJobEndpoints(this IEndpointRouteBuilder app)
|
||||
{
|
||||
var group = app.MapGroup("/api/v1/orchestrator/export")
|
||||
.WithTags("Export Jobs");
|
||||
|
||||
group.MapPost("jobs", CreateExportJob)
|
||||
.WithName("Orchestrator_CreateExportJob")
|
||||
.WithDescription("Create a new export job");
|
||||
|
||||
group.MapGet("jobs", ListExportJobs)
|
||||
.WithName("Orchestrator_ListExportJobs")
|
||||
.WithDescription("List export jobs with optional filters");
|
||||
|
||||
group.MapGet("jobs/{jobId:guid}", GetExportJob)
|
||||
.WithName("Orchestrator_GetExportJob")
|
||||
.WithDescription("Get a specific export job");
|
||||
|
||||
group.MapPost("jobs/{jobId:guid}/cancel", CancelExportJob)
|
||||
.WithName("Orchestrator_CancelExportJob")
|
||||
.WithDescription("Cancel a pending or running export job");
|
||||
|
||||
group.MapGet("quota", GetQuotaStatus)
|
||||
.WithName("Orchestrator_GetExportQuotaStatus")
|
||||
.WithDescription("Get export job quota status for the tenant");
|
||||
|
||||
group.MapPost("quota", EnsureQuota)
|
||||
.WithName("Orchestrator_EnsureExportQuota")
|
||||
.WithDescription("Ensure quota exists for an export type (creates with defaults if needed)");
|
||||
|
||||
group.MapGet("types", GetExportTypes)
|
||||
.WithName("Orchestrator_GetExportTypes")
|
||||
.WithDescription("Get available export job types and their rate limits");
|
||||
}
|
||||
|
||||
private static async Task<Results<Created<ExportJobResponse>, BadRequest<ErrorResponse>, Conflict<ErrorResponse>>> CreateExportJob(
|
||||
CreateExportJobRequest request,
|
||||
IExportJobService exportJobService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = GetTenantId(context);
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.ExportType))
|
||||
{
|
||||
return TypedResults.BadRequest(new ErrorResponse("invalid_export_type", "Export type is required"));
|
||||
}
|
||||
|
||||
if (!ExportJobTypes.IsExportJob(request.ExportType) && !ExportJobTypes.All.Contains(request.ExportType))
|
||||
{
|
||||
return TypedResults.BadRequest(new ErrorResponse("invalid_export_type", $"Unknown export type: {request.ExportType}"));
|
||||
}
|
||||
|
||||
var payload = new ExportJobPayload(
|
||||
Format: request.Format ?? "json",
|
||||
StartTime: request.StartTime,
|
||||
EndTime: request.EndTime,
|
||||
SourceId: request.SourceId,
|
||||
ProjectId: request.ProjectId,
|
||||
EntityIds: request.EntityIds,
|
||||
MaxEntries: request.MaxEntries,
|
||||
IncludeProvenance: request.IncludeProvenance ?? true,
|
||||
SignOutput: request.SignOutput ?? true,
|
||||
Compression: request.Compression,
|
||||
DestinationUri: request.DestinationUri,
|
||||
CallbackUrl: request.CallbackUrl,
|
||||
Options: request.Options);
|
||||
|
||||
try
|
||||
{
|
||||
var job = await exportJobService.CreateExportJobAsync(
|
||||
tenantId,
|
||||
request.ExportType,
|
||||
payload,
|
||||
GetActorId(context),
|
||||
request.ProjectId,
|
||||
request.CorrelationId,
|
||||
request.Priority,
|
||||
cancellationToken);
|
||||
|
||||
var response = MapToResponse(job);
|
||||
return TypedResults.Created($"/api/v1/orchestrator/export/jobs/{job.JobId}", response);
|
||||
}
|
||||
catch (InvalidOperationException ex)
|
||||
{
|
||||
return TypedResults.Conflict(new ErrorResponse("quota_exceeded", ex.Message));
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<Ok<ExportJobListResponse>> ListExportJobs(
|
||||
IExportJobService exportJobService,
|
||||
HttpContext context,
|
||||
string? exportType = null,
|
||||
string? status = null,
|
||||
string? projectId = null,
|
||||
DateTimeOffset? createdAfter = null,
|
||||
DateTimeOffset? createdBefore = null,
|
||||
int limit = 50,
|
||||
int offset = 0,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var tenantId = GetTenantId(context);
|
||||
|
||||
JobStatus? statusFilter = null;
|
||||
if (!string.IsNullOrEmpty(status) && Enum.TryParse<JobStatus>(status, true, out var parsed))
|
||||
{
|
||||
statusFilter = parsed;
|
||||
}
|
||||
|
||||
var jobs = await exportJobService.ListExportJobsAsync(
|
||||
tenantId,
|
||||
exportType,
|
||||
statusFilter,
|
||||
projectId,
|
||||
createdAfter,
|
||||
createdBefore,
|
||||
limit,
|
||||
offset,
|
||||
cancellationToken);
|
||||
|
||||
var response = new ExportJobListResponse(
|
||||
Items: jobs.Select(MapToResponse).ToList(),
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
HasMore: jobs.Count == limit);
|
||||
|
||||
return TypedResults.Ok(response);
|
||||
}
|
||||
|
||||
private static async Task<Results<Ok<ExportJobResponse>, NotFound>> GetExportJob(
|
||||
Guid jobId,
|
||||
IExportJobService exportJobService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = GetTenantId(context);
|
||||
|
||||
var job = await exportJobService.GetExportJobAsync(tenantId, jobId, cancellationToken);
|
||||
if (job is null)
|
||||
{
|
||||
return TypedResults.NotFound();
|
||||
}
|
||||
|
||||
return TypedResults.Ok(MapToResponse(job));
|
||||
}
|
||||
|
||||
private static async Task<Results<Ok<CancelExportJobResponse>, NotFound, BadRequest<ErrorResponse>>> CancelExportJob(
|
||||
Guid jobId,
|
||||
CancelExportJobRequest request,
|
||||
IExportJobService exportJobService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = GetTenantId(context);
|
||||
|
||||
var success = await exportJobService.CancelExportJobAsync(
|
||||
tenantId,
|
||||
jobId,
|
||||
request.Reason ?? "Canceled by user",
|
||||
GetActorId(context),
|
||||
cancellationToken);
|
||||
|
||||
if (!success)
|
||||
{
|
||||
var job = await exportJobService.GetExportJobAsync(tenantId, jobId, cancellationToken);
|
||||
if (job is null)
|
||||
{
|
||||
return TypedResults.NotFound();
|
||||
}
|
||||
|
||||
return TypedResults.BadRequest(new ErrorResponse(
|
||||
"cannot_cancel",
|
||||
$"Cannot cancel job in status: {job.Status}"));
|
||||
}
|
||||
|
||||
return TypedResults.Ok(new CancelExportJobResponse(jobId, true, DateTimeOffset.UtcNow));
|
||||
}
|
||||
|
||||
private static async Task<Ok<ExportQuotaStatusResponse>> GetQuotaStatus(
|
||||
IExportJobService exportJobService,
|
||||
HttpContext context,
|
||||
string? exportType = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var tenantId = GetTenantId(context);
|
||||
|
||||
var status = await exportJobService.GetQuotaStatusAsync(tenantId, exportType, cancellationToken);
|
||||
|
||||
var response = new ExportQuotaStatusResponse(
|
||||
MaxActive: status.MaxActive,
|
||||
CurrentActive: status.CurrentActive,
|
||||
MaxPerHour: status.MaxPerHour,
|
||||
CurrentHourCount: status.CurrentHourCount,
|
||||
AvailableTokens: status.AvailableTokens,
|
||||
Paused: status.Paused,
|
||||
PauseReason: status.PauseReason,
|
||||
CanCreateJob: status.CanCreateJob,
|
||||
EstimatedWaitSeconds: status.EstimatedWaitTime?.TotalSeconds);
|
||||
|
||||
return TypedResults.Ok(response);
|
||||
}
|
||||
|
||||
private static async Task<Created<QuotaResponse>> EnsureQuota(
|
||||
EnsureExportQuotaRequest request,
|
||||
IExportJobService exportJobService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = GetTenantId(context);
|
||||
|
||||
var quota = await exportJobService.EnsureQuotaAsync(
|
||||
tenantId,
|
||||
request.ExportType,
|
||||
GetActorId(context),
|
||||
cancellationToken);
|
||||
|
||||
var response = QuotaResponse.FromDomain(quota);
|
||||
|
||||
return TypedResults.Created($"/api/v1/orchestrator/quotas/{quota.QuotaId}", response);
|
||||
}
|
||||
|
||||
private static Ok<ExportTypesResponse> GetExportTypes()
|
||||
{
|
||||
var types = ExportJobTypes.All.Select(jobType =>
|
||||
{
|
||||
var rateLimit = ExportJobPolicy.RateLimits.GetForJobType(jobType);
|
||||
var target = ExportJobTypes.GetExportTarget(jobType) ?? "unknown";
|
||||
|
||||
return new ExportTypeInfo(
|
||||
JobType: jobType,
|
||||
Target: target,
|
||||
MaxConcurrent: rateLimit.MaxConcurrent,
|
||||
MaxPerHour: rateLimit.MaxPerHour,
|
||||
EstimatedDurationSeconds: rateLimit.EstimatedDurationSeconds);
|
||||
}).ToList();
|
||||
|
||||
return TypedResults.Ok(new ExportTypesResponse(
|
||||
Types: types,
|
||||
DefaultQuota: new DefaultQuotaInfo(
|
||||
MaxActive: ExportJobPolicy.QuotaDefaults.MaxActive,
|
||||
MaxPerHour: ExportJobPolicy.QuotaDefaults.MaxPerHour,
|
||||
BurstCapacity: ExportJobPolicy.QuotaDefaults.BurstCapacity,
|
||||
RefillRate: ExportJobPolicy.QuotaDefaults.RefillRate,
|
||||
DefaultPriority: ExportJobPolicy.QuotaDefaults.DefaultPriority,
|
||||
MaxAttempts: ExportJobPolicy.QuotaDefaults.MaxAttempts,
|
||||
DefaultLeaseSeconds: ExportJobPolicy.QuotaDefaults.DefaultLeaseSeconds,
|
||||
RecommendedHeartbeatInterval: ExportJobPolicy.QuotaDefaults.RecommendedHeartbeatInterval)));
|
||||
}
|
||||
|
||||
private static string GetTenantId(HttpContext context) =>
|
||||
context.Request.Headers["X-StellaOps-Tenant"].FirstOrDefault() ?? "default";
|
||||
|
||||
private static string GetActorId(HttpContext context) =>
|
||||
context.User.Identity?.Name ?? "anonymous";
|
||||
|
||||
private static ExportJobResponse MapToResponse(Job job) => new(
|
||||
JobId: job.JobId,
|
||||
TenantId: job.TenantId,
|
||||
ProjectId: job.ProjectId,
|
||||
ExportType: job.JobType,
|
||||
Status: job.Status.ToString(),
|
||||
Priority: job.Priority,
|
||||
Attempt: job.Attempt,
|
||||
MaxAttempts: job.MaxAttempts,
|
||||
PayloadDigest: job.PayloadDigest,
|
||||
IdempotencyKey: job.IdempotencyKey,
|
||||
CorrelationId: job.CorrelationId,
|
||||
WorkerId: job.WorkerId,
|
||||
LeaseUntil: job.LeaseUntil,
|
||||
CreatedAt: job.CreatedAt,
|
||||
ScheduledAt: job.ScheduledAt,
|
||||
LeasedAt: job.LeasedAt,
|
||||
CompletedAt: job.CompletedAt,
|
||||
Reason: job.Reason,
|
||||
CreatedBy: job.CreatedBy);
|
||||
}
|
||||
|
||||
// Request/Response records
|
||||
|
||||
public sealed record CreateExportJobRequest(
|
||||
string ExportType,
|
||||
string? Format,
|
||||
DateTimeOffset? StartTime,
|
||||
DateTimeOffset? EndTime,
|
||||
Guid? SourceId,
|
||||
string? ProjectId,
|
||||
IReadOnlyList<Guid>? EntityIds,
|
||||
int? MaxEntries,
|
||||
bool? IncludeProvenance,
|
||||
bool? SignOutput,
|
||||
string? Compression,
|
||||
string? DestinationUri,
|
||||
string? CallbackUrl,
|
||||
string? CorrelationId,
|
||||
int? Priority,
|
||||
IReadOnlyDictionary<string, string>? Options);
|
||||
|
||||
public sealed record ExportJobResponse(
|
||||
Guid JobId,
|
||||
string TenantId,
|
||||
string? ProjectId,
|
||||
string ExportType,
|
||||
string Status,
|
||||
int Priority,
|
||||
int Attempt,
|
||||
int MaxAttempts,
|
||||
string PayloadDigest,
|
||||
string IdempotencyKey,
|
||||
string? CorrelationId,
|
||||
string? WorkerId,
|
||||
DateTimeOffset? LeaseUntil,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset? ScheduledAt,
|
||||
DateTimeOffset? LeasedAt,
|
||||
DateTimeOffset? CompletedAt,
|
||||
string? Reason,
|
||||
string CreatedBy);
|
||||
|
||||
public sealed record ExportJobListResponse(
|
||||
IReadOnlyList<ExportJobResponse> Items,
|
||||
int Limit,
|
||||
int Offset,
|
||||
bool HasMore);
|
||||
|
||||
public sealed record CancelExportJobRequest(string? Reason);
|
||||
|
||||
public sealed record CancelExportJobResponse(
|
||||
Guid JobId,
|
||||
bool Canceled,
|
||||
DateTimeOffset CanceledAt);
|
||||
|
||||
public sealed record ExportQuotaStatusResponse(
|
||||
int MaxActive,
|
||||
int CurrentActive,
|
||||
int MaxPerHour,
|
||||
int CurrentHourCount,
|
||||
double AvailableTokens,
|
||||
bool Paused,
|
||||
string? PauseReason,
|
||||
bool CanCreateJob,
|
||||
double? EstimatedWaitSeconds);
|
||||
|
||||
public sealed record EnsureExportQuotaRequest(string ExportType);
|
||||
|
||||
public sealed record ExportTypesResponse(
|
||||
IReadOnlyList<ExportTypeInfo> Types,
|
||||
DefaultQuotaInfo DefaultQuota);
|
||||
|
||||
public sealed record ExportTypeInfo(
|
||||
string JobType,
|
||||
string Target,
|
||||
int MaxConcurrent,
|
||||
int MaxPerHour,
|
||||
int EstimatedDurationSeconds);
|
||||
|
||||
public sealed record DefaultQuotaInfo(
|
||||
int MaxActive,
|
||||
int MaxPerHour,
|
||||
int BurstCapacity,
|
||||
double RefillRate,
|
||||
int DefaultPriority,
|
||||
int MaxAttempts,
|
||||
int DefaultLeaseSeconds,
|
||||
int RecommendedHeartbeatInterval);
|
||||
|
||||
public sealed record ErrorResponse(string Error, string Message);
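
For manual testing, the create/get flow can be driven with a few lines of HttpClient. A hedged sketch, assuming a locally reachable host and an export type that exists in ExportJobTypes (the concrete type names are defined elsewhere in the codebase); tenant selection uses the X-StellaOps-Tenant header read by GetTenantId above:

    async Task<ExportJobResponse?> CreateAndFetchExportJobAsync(HttpClient http, string exportType, CancellationToken ct)
    {
        http.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "tenant-test");

        using var create = await http.PostAsJsonAsync("/api/v1/orchestrator/export/jobs",
            new CreateExportJobRequest(
                ExportType: exportType, Format: "json",
                StartTime: null, EndTime: null, SourceId: null, ProjectId: null,
                EntityIds: null, MaxEntries: null, IncludeProvenance: true, SignOutput: true,
                Compression: null, DestinationUri: null, CallbackUrl: null,
                CorrelationId: null, Priority: null, Options: null), ct);
        create.EnsureSuccessStatusCode();

        var created = await create.Content.ReadFromJsonAsync<ExportJobResponse>(cancellationToken: ct);
        return await http.GetFromJsonAsync<ExportJobResponse>(
            $"/api/v1/orchestrator/export/jobs/{created!.JobId}", ct);
    }
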
|
||||
@@ -0,0 +1,837 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
using StellaOps.Orchestrator.Infrastructure;
|
||||
using StellaOps.Orchestrator.Infrastructure.Repositories;
|
||||
using StellaOps.Orchestrator.WebService.Contracts;
|
||||
using StellaOps.Orchestrator.WebService.Services;
|
||||
using PackLogLevel = StellaOps.Orchestrator.Core.Domain.LogLevel;
|
||||
|
||||
namespace StellaOps.Orchestrator.WebService.Endpoints;
|
||||
|
||||
/// <summary>
|
||||
/// Pack run endpoints for scheduling, execution, and log management.
|
||||
/// </summary>
|
||||
public static class PackRunEndpoints
|
||||
{
|
||||
private const int DefaultLeaseSeconds = 300; // 5 minutes
|
||||
private const int MaxLeaseSeconds = 3600; // 1 hour
|
||||
private const int DefaultExtendSeconds = 300;
|
||||
private const int MaxExtendSeconds = 1800; // 30 minutes
|
||||
private const int DefaultLogLimit = 100;
|
||||
private const int MaxLogLimit = 1000;
|
||||
|
||||
/// <summary>
|
||||
/// Maps pack run endpoints to the route builder.
|
||||
/// </summary>
|
||||
public static RouteGroupBuilder MapPackRunEndpoints(this IEndpointRouteBuilder app)
|
||||
{
|
||||
var group = app.MapGroup("/api/v1/orchestrator/pack-runs")
|
||||
.WithTags("Orchestrator Pack Runs");
|
||||
|
||||
// Scheduling endpoints
|
||||
group.MapPost("", SchedulePackRun)
|
||||
.WithName("Orchestrator_SchedulePackRun")
|
||||
.WithDescription("Schedule a new pack run");
|
||||
|
||||
group.MapGet("{packRunId:guid}", GetPackRun)
|
||||
.WithName("Orchestrator_GetPackRun")
|
||||
.WithDescription("Get pack run details");
|
||||
|
||||
group.MapGet("", ListPackRuns)
|
||||
.WithName("Orchestrator_ListPackRuns")
|
||||
.WithDescription("List pack runs with filters");
|
||||
|
||||
// Task runner (worker) endpoints
|
||||
group.MapPost("claim", ClaimPackRun)
|
||||
.WithName("Orchestrator_ClaimPackRun")
|
||||
.WithDescription("Claim a pack run for execution");
|
||||
|
||||
group.MapPost("{packRunId:guid}/heartbeat", Heartbeat)
|
||||
.WithName("Orchestrator_PackRunHeartbeat")
|
||||
.WithDescription("Extend pack run lease");
|
||||
|
||||
group.MapPost("{packRunId:guid}/start", StartPackRun)
|
||||
.WithName("Orchestrator_StartPackRun")
|
||||
.WithDescription("Mark pack run as started");
|
||||
|
||||
group.MapPost("{packRunId:guid}/complete", CompletePackRun)
|
||||
.WithName("Orchestrator_CompletePackRun")
|
||||
.WithDescription("Complete a pack run");
|
||||
|
||||
// Log endpoints
|
||||
group.MapPost("{packRunId:guid}/logs", AppendLogs)
|
||||
.WithName("Orchestrator_AppendPackRunLogs")
|
||||
.WithDescription("Append logs to a pack run");
|
||||
|
||||
group.MapGet("{packRunId:guid}/logs", GetLogs)
|
||||
.WithName("Orchestrator_GetPackRunLogs")
|
||||
.WithDescription("Get pack run logs with cursor pagination");
|
||||
|
||||
// Cancel/retry endpoints
|
||||
group.MapPost("{packRunId:guid}/cancel", CancelPackRun)
|
||||
.WithName("Orchestrator_CancelPackRun")
|
||||
.WithDescription("Cancel a pack run");
|
||||
|
||||
group.MapPost("{packRunId:guid}/retry", RetryPackRun)
|
||||
.WithName("Orchestrator_RetryPackRun")
|
||||
.WithDescription("Retry a failed pack run");
|
||||
|
||||
return group;
|
||||
}
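
    // Wiring sketch (hedged; the host's Program.cs is not part of this change):
    //
    //     var app = builder.Build();
    //     app.MapPackRunEndpoints();     // routes mapped above
    //     app.MapExportJobEndpoints();   // export routes from ExportJobEndpoints
    //     app.Run();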
|
||||
|
||||
// ========== Scheduling Endpoints ==========
|
||||
|
||||
private static async Task<IResult> SchedulePackRun(
|
||||
HttpContext context,
|
||||
[FromBody] SchedulePackRunRequest request,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
[FromServices] IEventPublisher eventPublisher,
|
||||
[FromServices] TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// Validate request
|
||||
if (string.IsNullOrWhiteSpace(request.PackId))
|
||||
{
|
||||
return Results.BadRequest(new PackRunErrorResponse(
|
||||
"invalid_request", "PackId is required", null, null));
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.PackVersion))
|
||||
{
|
||||
return Results.BadRequest(new PackRunErrorResponse(
|
||||
"invalid_request", "PackVersion is required", null, null));
|
||||
}
|
||||
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var now = timeProvider.GetUtcNow();
|
||||
var parameters = request.Parameters ?? "{}";
|
||||
var parametersDigest = ComputeDigest(parameters);
|
||||
var idempotencyKey = request.IdempotencyKey ?? $"pack-run:{request.PackId}:{parametersDigest}:{now:yyyyMMddHHmm}";
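        // The fallback key includes a minute-granularity timestamp, so identical requests
        // (same pack + parameters) within the same minute collapse onto one run.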
|
||||
|
||||
// Check for existing pack run with same idempotency key
|
||||
var existing = await packRunRepository.GetByIdempotencyKeyAsync(tenantId, idempotencyKey, cancellationToken);
|
||||
if (existing is not null)
|
||||
{
|
||||
return Results.Ok(new SchedulePackRunResponse(
|
||||
existing.PackRunId,
|
||||
existing.PackId,
|
||||
existing.PackVersion,
|
||||
existing.Status.ToString().ToLowerInvariant(),
|
||||
existing.IdempotencyKey,
|
||||
existing.CreatedAt,
|
||||
WasAlreadyScheduled: true));
|
||||
}
|
||||
|
||||
// Create new pack run
|
||||
var packRunId = Guid.NewGuid();
|
||||
var packRun = PackRun.Create(
|
||||
packRunId: packRunId,
|
||||
tenantId: tenantId,
|
||||
projectId: request.ProjectId,
|
||||
packId: request.PackId,
|
||||
packVersion: request.PackVersion,
|
||||
parameters: parameters,
|
||||
parametersDigest: parametersDigest,
|
||||
idempotencyKey: idempotencyKey,
|
||||
correlationId: request.CorrelationId,
|
||||
createdBy: context.User?.Identity?.Name ?? "system",
|
||||
priority: request.Priority ?? 0,
|
||||
maxAttempts: request.MaxAttempts ?? 3,
|
||||
metadata: request.Metadata,
|
||||
createdAt: now);
|
||||
|
||||
await packRunRepository.CreateAsync(packRun, cancellationToken);
|
||||
|
||||
OrchestratorMetrics.PackRunCreated(tenantId, request.PackId);
|
||||
|
||||
// Publish event
|
||||
var envelope = EventEnvelope.Create(
|
||||
eventType: OrchestratorEventType.PackRunCreated,
|
||||
tenantId: tenantId,
|
||||
actor: EventActor.User(context.User?.Identity?.Name ?? "system", "webservice"),
|
||||
correlationId: request.CorrelationId,
|
||||
projectId: request.ProjectId,
|
||||
payload: ToPayload(new { packRunId, packId = request.PackId, packVersion = request.PackVersion }));
|
||||
await eventPublisher.PublishAsync(envelope, cancellationToken);
|
||||
|
||||
return Results.Created($"/api/v1/orchestrator/pack-runs/{packRunId}", new SchedulePackRunResponse(
|
||||
packRunId,
|
||||
request.PackId,
|
||||
request.PackVersion,
|
||||
packRun.Status.ToString().ToLowerInvariant(),
|
||||
idempotencyKey,
|
||||
now,
|
||||
WasAlreadyScheduled: false));
|
||||
}
|
||||
|
||||
private static async Task<IResult> GetPackRun(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packRunId,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken);
|
||||
|
||||
if (packRun is null)
|
||||
{
|
||||
return Results.NotFound(new PackRunErrorResponse(
|
||||
"not_found", $"Pack run {packRunId} not found", packRunId, null));
|
||||
}
|
||||
|
||||
return Results.Ok(PackRunResponse.FromDomain(packRun));
|
||||
}
|
||||
|
||||
private static async Task<IResult> ListPackRuns(
|
||||
HttpContext context,
|
||||
[FromQuery] string? packId,
|
||||
[FromQuery] string? status,
|
||||
[FromQuery] string? projectId,
|
||||
[FromQuery] DateTimeOffset? createdAfter,
|
||||
[FromQuery] DateTimeOffset? createdBefore,
|
||||
[FromQuery] int? limit,
|
||||
[FromQuery] int? offset,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var effectiveLimit = Math.Min(limit ?? 50, 100);
|
||||
var effectiveOffset = offset ?? 0;
|
||||
|
||||
PackRunStatus? statusFilter = null;
|
||||
if (!string.IsNullOrEmpty(status) && Enum.TryParse<PackRunStatus>(status, true, out var parsed))
|
||||
{
|
||||
statusFilter = parsed;
|
||||
}
|
||||
|
||||
var packRuns = await packRunRepository.ListAsync(
|
||||
tenantId, packId, statusFilter, projectId,
|
||||
createdAfter, createdBefore,
|
||||
effectiveLimit, effectiveOffset, cancellationToken);
|
||||
|
||||
var totalCount = await packRunRepository.CountAsync(
|
||||
tenantId, packId, statusFilter, projectId, cancellationToken);
|
||||
|
||||
var responses = packRuns.Select(PackRunResponse.FromDomain).ToList();
|
||||
var nextCursor = responses.Count == effectiveLimit
|
||||
? (effectiveOffset + effectiveLimit).ToString()
|
||||
: null;
|
||||
|
||||
return Results.Ok(new PackRunListResponse(responses, totalCount, nextCursor));
|
||||
}
|
||||
|
||||
// ========== Task Runner Endpoints ==========
|
||||
|
||||
private static async Task<IResult> ClaimPackRun(
|
||||
HttpContext context,
|
||||
[FromBody] ClaimPackRunRequest request,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
[FromServices] TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(request.TaskRunnerId))
|
||||
{
|
||||
return Results.BadRequest(new PackRunErrorResponse(
|
||||
"invalid_request", "TaskRunnerId is required", null, null));
|
||||
}
|
||||
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var leaseSeconds = Math.Min(request.LeaseSeconds ?? DefaultLeaseSeconds, MaxLeaseSeconds);
|
||||
var now = timeProvider.GetUtcNow();
|
||||
var leaseUntil = now.AddSeconds(leaseSeconds);
|
||||
var leaseId = Guid.NewGuid();
|
||||
|
||||
// Idempotency check
|
||||
if (!string.IsNullOrEmpty(request.IdempotencyKey))
|
||||
{
|
||||
var existingRun = await packRunRepository.GetByIdempotencyKeyAsync(
|
||||
tenantId, $"claim:{request.IdempotencyKey}", cancellationToken);
|
||||
|
||||
if (existingRun is not null && existingRun.Status == PackRunStatus.Leased &&
|
||||
existingRun.TaskRunnerId == request.TaskRunnerId)
|
||||
{
|
||||
return Results.Ok(CreateClaimResponse(existingRun));
|
||||
}
|
||||
}
|
||||
|
||||
var packRun = await packRunRepository.LeaseNextAsync(
|
||||
tenantId, request.PackId, leaseId, request.TaskRunnerId, leaseUntil, cancellationToken);
|
||||
|
||||
        if (packRun is null)
        {
            // Responses with status 204 must not carry a body, so report
            // "nothing to claim" without the error payload and let the runner
            // back off on its own schedule.
            return Results.NoContent();
        }
|
||||
|
||||
OrchestratorMetrics.PackRunLeased(tenantId, packRun.PackId);
|
||||
|
||||
return Results.Ok(CreateClaimResponse(packRun));
|
||||
}
|
||||
|
||||
private static async Task<IResult> Heartbeat(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packRunId,
|
||||
[FromBody] PackRunHeartbeatRequest request,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
[FromServices] TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
|
||||
var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken);
|
||||
if (packRun is null)
|
||||
{
|
||||
return Results.NotFound(new PackRunErrorResponse(
|
||||
"not_found", $"Pack run {packRunId} not found", packRunId, null));
|
||||
}
|
||||
|
||||
if (packRun.LeaseId != request.LeaseId)
|
||||
{
|
||||
return Results.Json(
|
||||
new PackRunErrorResponse("invalid_lease", "Lease ID does not match", packRunId, null),
|
||||
statusCode: StatusCodes.Status409Conflict);
|
||||
}
|
||||
|
||||
if (packRun.Status != PackRunStatus.Leased && packRun.Status != PackRunStatus.Running)
|
||||
{
|
||||
return Results.Json(
|
||||
new PackRunErrorResponse("invalid_status", $"Pack run is not in leased/running status: {packRun.Status}", packRunId, null),
|
||||
statusCode: StatusCodes.Status409Conflict);
|
||||
}
|
||||
|
||||
var extendSeconds = Math.Min(request.ExtendSeconds ?? DefaultExtendSeconds, MaxExtendSeconds);
|
||||
var now = timeProvider.GetUtcNow();
|
||||
var newLeaseUntil = now.AddSeconds(extendSeconds);
|
||||
|
||||
var extended = await packRunRepository.ExtendLeaseAsync(
|
||||
tenantId, packRunId, request.LeaseId, newLeaseUntil, cancellationToken);
|
||||
|
||||
if (!extended)
|
||||
{
|
||||
return Results.Json(
|
||||
new PackRunErrorResponse("lease_expired", "Lease has expired", packRunId, null),
|
||||
statusCode: StatusCodes.Status409Conflict);
|
||||
}
|
||||
|
||||
OrchestratorMetrics.PackRunHeartbeatReceived(tenantId, packRun.PackId);
|
||||
|
||||
return Results.Ok(new PackRunHeartbeatResponse(packRunId, request.LeaseId, newLeaseUntil, Acknowledged: true));
|
||||
}
|
||||
|
||||
private static async Task<IResult> StartPackRun(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packRunId,
|
||||
[FromBody] PackRunStartRequest request,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
[FromServices] IPackRunLogRepository logRepository,
|
||||
[FromServices] IEventPublisher eventPublisher,
|
||||
[FromServices] TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
|
||||
var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken);
|
||||
if (packRun is null)
|
||||
{
|
||||
return Results.NotFound(new PackRunErrorResponse(
|
||||
"not_found", $"Pack run {packRunId} not found", packRunId, null));
|
||||
}
|
||||
|
||||
if (packRun.LeaseId != request.LeaseId)
|
||||
{
|
||||
return Results.Json(
|
||||
new PackRunErrorResponse("invalid_lease", "Lease ID does not match", packRunId, null),
|
||||
statusCode: StatusCodes.Status409Conflict);
|
||||
}
|
||||
|
||||
if (packRun.Status != PackRunStatus.Leased)
|
||||
{
|
||||
return Results.Json(
|
||||
new PackRunErrorResponse("invalid_status", $"Pack run is not in leased status: {packRun.Status}", packRunId, null),
|
||||
statusCode: StatusCodes.Status409Conflict);
|
||||
}
|
||||
|
||||
var now = timeProvider.GetUtcNow();
|
||||
|
||||
await packRunRepository.UpdateStatusAsync(
|
||||
tenantId, packRunId,
|
||||
PackRunStatus.Running,
|
||||
packRun.Attempt,
|
||||
packRun.LeaseId,
|
||||
packRun.TaskRunnerId,
|
||||
packRun.LeaseUntil,
|
||||
packRun.ScheduledAt,
|
||||
packRun.LeasedAt,
|
||||
now, // startedAt
|
||||
null, null, null, null, null,
|
||||
cancellationToken);
|
||||
|
||||
// Append system log entry
|
||||
var log = PackRunLog.System(packRunId, tenantId, 0, PackLogLevel.Info, "Pack run started", null, now);
|
||||
await logRepository.AppendAsync(log, cancellationToken);
|
||||
|
||||
OrchestratorMetrics.PackRunStarted(tenantId, packRun.PackId);
|
||||
|
||||
// Publish event
|
||||
var envelope = EventEnvelope.Create(
|
||||
eventType: OrchestratorEventType.PackRunStarted,
|
||||
tenantId: tenantId,
|
||||
actor: EventActor.System("task-runner", packRun.TaskRunnerId ?? "unknown"),
|
||||
correlationId: packRun.CorrelationId,
|
||||
projectId: packRun.ProjectId,
|
||||
payload: ToPayload(new { packRunId, packId = packRun.PackId, packVersion = packRun.PackVersion }));
|
||||
await eventPublisher.PublishAsync(envelope, cancellationToken);
|
||||
|
||||
return Results.Ok(new PackRunStartResponse(packRunId, Acknowledged: true, StartedAt: now));
|
||||
}
|
||||
|
||||
private static async Task<IResult> CompletePackRun(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packRunId,
|
||||
[FromBody] CompletePackRunRequest request,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
[FromServices] IPackRunLogRepository logRepository,
|
||||
[FromServices] IArtifactRepository artifactRepository,
|
||||
[FromServices] IEventPublisher eventPublisher,
|
||||
[FromServices] TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
|
||||
var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken);
|
||||
if (packRun is null)
|
||||
{
|
||||
return Results.NotFound(new PackRunErrorResponse(
|
||||
"not_found", $"Pack run {packRunId} not found", packRunId, null));
|
||||
}
|
||||
|
||||
if (packRun.LeaseId != request.LeaseId)
|
||||
{
|
||||
return Results.Json(
|
||||
new PackRunErrorResponse("invalid_lease", "Lease ID does not match", packRunId, null),
|
||||
statusCode: StatusCodes.Status409Conflict);
|
||||
}
|
||||
|
||||
if (packRun.Status != PackRunStatus.Leased && packRun.Status != PackRunStatus.Running)
|
||||
{
|
||||
return Results.Json(
|
||||
new PackRunErrorResponse("invalid_status", $"Pack run is not in leased/running status: {packRun.Status}", packRunId, null),
|
||||
statusCode: StatusCodes.Status409Conflict);
|
||||
}
|
||||
|
||||
var now = timeProvider.GetUtcNow();
|
||||
var newStatus = request.Success ? PackRunStatus.Succeeded : PackRunStatus.Failed;
|
||||
var durationMs = packRun.StartedAt.HasValue
|
||||
? (long)(now - packRun.StartedAt.Value).TotalMilliseconds
|
||||
: (packRun.LeasedAt.HasValue ? (long)(now - packRun.LeasedAt.Value).TotalMilliseconds : 0);
|
||||
|
||||
// Create artifacts
|
||||
var artifactIds = new List<Guid>();
|
||||
if (request.Artifacts is { Count: > 0 })
|
||||
{
|
||||
var artifacts = request.Artifacts.Select(a => new Artifact(
|
||||
ArtifactId: Guid.NewGuid(),
|
||||
TenantId: tenantId,
|
||||
JobId: Guid.Empty, // Pack runs don't have a job ID
|
||||
RunId: null, // Pack runs are not part of a run
|
||||
ArtifactType: a.ArtifactType,
|
||||
Uri: a.Uri,
|
||||
Digest: a.Digest,
|
||||
MimeType: a.MimeType,
|
||||
SizeBytes: a.SizeBytes,
|
||||
CreatedAt: now,
|
||||
Metadata: $"{{\"packRunId\":\"{packRunId}\",\"packId\":\"{packRun.PackId}\"{(a.Metadata != null ? "," + a.Metadata.TrimStart('{').TrimEnd('}') : "")}}}")).ToList();
|
||||
|
||||
await artifactRepository.CreateBatchAsync(artifacts, cancellationToken);
|
||||
artifactIds.AddRange(artifacts.Select(a => a.ArtifactId));
|
||||
|
||||
foreach (var artifact in artifacts)
|
||||
{
|
||||
OrchestratorMetrics.ArtifactCreated(tenantId, artifact.ArtifactType);
|
||||
}
|
||||
}
|
||||
|
||||
// Update status
|
||||
await packRunRepository.UpdateStatusAsync(
|
||||
tenantId, packRunId,
|
||||
newStatus,
|
||||
packRun.Attempt,
|
||||
null, // clear lease
|
||||
null, // clear task runner
|
||||
null, // clear lease until
|
||||
packRun.ScheduledAt,
|
||||
packRun.LeasedAt,
|
||||
packRun.StartedAt,
|
||||
now, // completedAt
|
||||
null,
|
||||
request.Reason,
|
||||
request.ExitCode,
|
||||
durationMs,
|
||||
cancellationToken);
|
||||
|
||||
// Append system log entry
|
||||
var (logCount, latestSeq) = await logRepository.GetLogStatsAsync(tenantId, packRunId, cancellationToken);
|
||||
var completionLog = PackRunLog.System(
|
||||
packRunId, tenantId, latestSeq + 1,
|
||||
request.Success ? PackLogLevel.Info : PackLogLevel.Error,
|
||||
$"Pack run {(request.Success ? "succeeded" : "failed")} with exit code {request.ExitCode}",
|
||||
null, now);
|
||||
await logRepository.AppendAsync(completionLog, cancellationToken);
|
||||
|
||||
// Record metrics
|
||||
var durationSeconds = durationMs / 1000.0;
|
||||
if (request.Success)
|
||||
{
|
||||
OrchestratorMetrics.PackRunCompleted(tenantId, packRun.PackId, "succeeded");
|
||||
}
|
||||
else
|
||||
{
|
||||
OrchestratorMetrics.PackRunFailed(tenantId, packRun.PackId);
|
||||
}
|
||||
OrchestratorMetrics.RecordPackRunDuration(tenantId, packRun.PackId, durationSeconds);
|
||||
OrchestratorMetrics.RecordPackRunLogCount(tenantId, packRun.PackId, logCount + 1);
|
||||
|
||||
// Publish event
|
||||
var eventType = request.Success
|
||||
? OrchestratorEventType.PackRunCompleted
|
||||
: OrchestratorEventType.PackRunFailed;
|
||||
var envelope = EventEnvelope.Create(
|
||||
eventType: eventType,
|
||||
tenantId: tenantId,
|
||||
actor: EventActor.System("task-runner", packRun.TaskRunnerId ?? "unknown"),
|
||||
correlationId: packRun.CorrelationId,
|
||||
projectId: packRun.ProjectId,
|
||||
payload: ToPayload(new
|
||||
{
|
||||
packRunId,
|
||||
packId = packRun.PackId,
|
||||
packVersion = packRun.PackVersion,
|
||||
exitCode = request.ExitCode,
|
||||
durationMs,
|
||||
artifactCount = artifactIds.Count
|
||||
}));
|
||||
await eventPublisher.PublishAsync(envelope, cancellationToken);
|
||||
|
||||
return Results.Ok(new CompletePackRunResponse(
|
||||
packRunId,
|
||||
newStatus.ToString().ToLowerInvariant(),
|
||||
now,
|
||||
artifactIds,
|
||||
durationMs));
|
||||
}
|
||||
|
||||
// ========== Log Endpoints ==========
|
||||
|
||||
private static async Task<IResult> AppendLogs(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packRunId,
|
||||
[FromBody] AppendLogsRequest request,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
[FromServices] IPackRunLogRepository logRepository,
|
||||
[FromServices] IEventPublisher eventPublisher,
|
||||
[FromServices] TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
|
||||
var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken);
|
||||
if (packRun is null)
|
||||
{
|
||||
return Results.NotFound(new PackRunErrorResponse(
|
||||
"not_found", $"Pack run {packRunId} not found", packRunId, null));
|
||||
}
|
||||
|
||||
if (packRun.LeaseId != request.LeaseId)
|
||||
{
|
||||
return Results.Json(
|
||||
new PackRunErrorResponse("invalid_lease", "Lease ID does not match", packRunId, null),
|
||||
statusCode: StatusCodes.Status409Conflict);
|
||||
}
|
||||
|
||||
if (request.Logs.Count == 0)
|
||||
{
|
||||
return Results.Ok(new AppendLogsResponse(packRunId, 0, 0));
|
||||
}
|
||||
|
||||
// Get current sequence
|
||||
var (_, currentSeq) = await logRepository.GetLogStatsAsync(tenantId, packRunId, cancellationToken);
|
||||
var now = timeProvider.GetUtcNow();
|
||||
|
||||
var logs = new List<PackRunLog>();
|
||||
var seq = currentSeq;
|
||||
foreach (var entry in request.Logs)
|
||||
{
|
||||
seq++;
|
||||
var level = Enum.TryParse<PackLogLevel>(entry.Level, true, out var parsedLevel)
|
||||
? parsedLevel
|
||||
: PackLogLevel.Info;
|
||||
|
||||
logs.Add(PackRunLog.Create(
|
||||
packRunId, tenantId, seq, level,
|
||||
entry.Source,
|
||||
entry.Message,
|
||||
entry.Data,
|
||||
entry.Timestamp ?? now));
|
||||
}
|
||||
|
||||
await logRepository.AppendBatchAsync(logs, cancellationToken);
|
||||
|
||||
OrchestratorMetrics.PackRunLogAppended(tenantId, packRun.PackId, logs.Count);
|
||||
|
||||
// Publish log event for streaming
|
||||
var envelope = EventEnvelope.Create(
|
||||
eventType: OrchestratorEventType.PackRunLog,
|
||||
tenantId: tenantId,
|
||||
actor: EventActor.System("task-runner", packRun.TaskRunnerId ?? "unknown"),
|
||||
correlationId: packRun.CorrelationId,
|
||||
projectId: packRun.ProjectId,
|
||||
payload: ToPayload(new { packRunId, logCount = logs.Count, latestSequence = seq }));
|
||||
await eventPublisher.PublishAsync(envelope, cancellationToken);
|
||||
|
||||
return Results.Ok(new AppendLogsResponse(packRunId, logs.Count, seq));
|
||||
}
|
||||
|
||||
private static async Task<IResult> GetLogs(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packRunId,
|
||||
[FromQuery] long? afterSequence,
|
||||
[FromQuery] string? level,
|
||||
[FromQuery] string? search,
|
||||
[FromQuery] int? limit,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRunRepository packRunRepository,
|
||||
[FromServices] IPackRunLogRepository logRepository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
|
||||
var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken);
|
||||
if (packRun is null)
|
||||
{
|
||||
return Results.NotFound(new PackRunErrorResponse(
|
||||
"not_found", $"Pack run {packRunId} not found", packRunId, null));
|
||||
}
|
||||
|
||||
var effectiveLimit = Math.Min(limit ?? DefaultLogLimit, MaxLogLimit);
|
||||
var after = afterSequence ?? -1;
|
||||
|
||||
PackRunLogBatch batch;
|
||||
|
||||
if (!string.IsNullOrEmpty(search))
|
||||
{
|
||||
batch = await logRepository.SearchLogsAsync(tenantId, packRunId, search, after, effectiveLimit, cancellationToken);
|
||||
}
|
||||
else if (!string.IsNullOrEmpty(level) && Enum.TryParse<PackLogLevel>(level, true, out var minLevel))
|
||||
{
|
||||
batch = await logRepository.GetLogsByLevelAsync(tenantId, packRunId, minLevel, after, effectiveLimit, cancellationToken);
|
||||
}
|
||||
else
|
||||
{
|
||||
batch = await logRepository.GetLogsAsync(tenantId, packRunId, after, effectiveLimit, cancellationToken);
|
||||
}
|
||||
|
||||
var responses = batch.Logs.Select(LogEntryResponse.FromDomain).ToList();
|
||||
        var hasMore = responses.Count == effectiveLimit;
        long? nextSeq = hasMore && responses.Count > 0 ? responses[^1].Sequence : null;

        return Results.Ok(new LogBatchResponse(
            packRunId,
            responses,
            batch.StartSequence,
            nextSeq,
            hasMore));
    }

    // ========== Cancel/Retry Endpoints ==========

    private static async Task<IResult> CancelPackRun(
        HttpContext context,
        [FromRoute] Guid packRunId,
        [FromBody] CancelPackRunRequest request,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IPackRunRepository packRunRepository,
        [FromServices] IPackRunLogRepository logRepository,
        [FromServices] IEventPublisher eventPublisher,
        [FromServices] TimeProvider timeProvider,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);

        var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken);
        if (packRun is null)
        {
            return Results.NotFound(new PackRunErrorResponse(
                "not_found", $"Pack run {packRunId} not found", packRunId, null));
        }

        if (packRun.IsTerminal)
        {
            return Results.Json(
                new PackRunErrorResponse("already_terminal", $"Pack run is already in terminal status: {packRun.Status}", packRunId, null),
                statusCode: StatusCodes.Status409Conflict);
        }

        var now = timeProvider.GetUtcNow();

        await packRunRepository.UpdateStatusAsync(
            tenantId, packRunId,
            PackRunStatus.Canceled,
            packRun.Attempt,
            null, null, null,
            packRun.ScheduledAt,
            packRun.LeasedAt,
            packRun.StartedAt,
            now,
            null,
            request.Reason,
            null, null,
            cancellationToken);

        // Append system log entry
        var (_, latestSeq) = await logRepository.GetLogStatsAsync(tenantId, packRunId, cancellationToken);
        var cancelLog = PackRunLog.System(
            packRunId, tenantId, latestSeq + 1,
            PackLogLevel.Warn, $"Pack run canceled: {request.Reason}", null, now);
        await logRepository.AppendAsync(cancelLog, cancellationToken);

        OrchestratorMetrics.PackRunCanceled(tenantId, packRun.PackId);

        // Publish event
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.PackRunFailed, // Use Failed for canceled
            tenantId: tenantId,
            actor: EventActor.User(context.User?.Identity?.Name ?? "system", "webservice"),
            correlationId: packRun.CorrelationId,
            projectId: packRun.ProjectId,
            payload: ToPayload(new { packRunId, packId = packRun.PackId, status = "canceled", reason = request.Reason }));
        await eventPublisher.PublishAsync(envelope, cancellationToken);

        return Results.Ok(new CancelPackRunResponse(packRunId, "canceled", request.Reason, now));
    }

    private static async Task<IResult> RetryPackRun(
        HttpContext context,
        [FromRoute] Guid packRunId,
        [FromBody] RetryPackRunRequest request,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IPackRunRepository packRunRepository,
        [FromServices] IEventPublisher eventPublisher,
        [FromServices] TimeProvider timeProvider,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);

        var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken);
        if (packRun is null)
        {
            return Results.NotFound(new PackRunErrorResponse(
                "not_found", $"Pack run {packRunId} not found", packRunId, null));
        }

        if (!packRun.IsTerminal)
        {
            return Results.Json(
                new PackRunErrorResponse("not_terminal", $"Pack run is not in terminal status: {packRun.Status}", packRunId, null),
                statusCode: StatusCodes.Status409Conflict);
        }

        if (packRun.Status == PackRunStatus.Succeeded)
        {
            return Results.Json(
                new PackRunErrorResponse("already_succeeded", "Cannot retry a successful pack run", packRunId, null),
                statusCode: StatusCodes.Status409Conflict);
        }

        var now = timeProvider.GetUtcNow();
        var newPackRunId = Guid.NewGuid();
        var parameters = request.Parameters ?? packRun.Parameters;
        var parametersDigest = request.Parameters != null ? ComputeDigest(parameters) : packRun.ParametersDigest;
        var idempotencyKey = request.IdempotencyKey ?? $"retry:{packRunId}:{now:yyyyMMddHHmmss}";

        var newPackRun = PackRun.Create(
            packRunId: newPackRunId,
            tenantId: tenantId,
            projectId: packRun.ProjectId,
            packId: packRun.PackId,
            packVersion: packRun.PackVersion,
            parameters: parameters,
            parametersDigest: parametersDigest,
            idempotencyKey: idempotencyKey,
            correlationId: packRun.CorrelationId,
            createdBy: context.User?.Identity?.Name ?? "system",
            priority: packRun.Priority,
            maxAttempts: packRun.MaxAttempts,
            metadata: $"{{\"retriedFrom\":\"{packRunId}\"}}",
            createdAt: now);

        await packRunRepository.CreateAsync(newPackRun, cancellationToken);

        OrchestratorMetrics.PackRunCreated(tenantId, packRun.PackId);

        // Publish event
        var envelope = EventEnvelope.Create(
            eventType: OrchestratorEventType.PackRunCreated,
            tenantId: tenantId,
            actor: EventActor.User(context.User?.Identity?.Name ?? "system", "webservice"),
            correlationId: packRun.CorrelationId,
            projectId: packRun.ProjectId,
            payload: ToPayload(new { packRunId = newPackRunId, packId = packRun.PackId, retriedFrom = packRunId }));
        await eventPublisher.PublishAsync(envelope, cancellationToken);

        return Results.Created($"/api/v1/orchestrator/pack-runs/{newPackRunId}", new RetryPackRunResponse(
            packRunId,
            newPackRunId,
            newPackRun.Status.ToString().ToLowerInvariant(),
            now));
    }

    // ========== Helper Methods ==========

    private static ClaimPackRunResponse CreateClaimResponse(PackRun packRun)
    {
        return new ClaimPackRunResponse(
            packRun.PackRunId,
            packRun.LeaseId!.Value,
            packRun.PackId,
            packRun.PackVersion,
            packRun.Parameters,
            packRun.ParametersDigest,
            packRun.Attempt,
            packRun.MaxAttempts,
            packRun.LeaseUntil!.Value,
            packRun.IdempotencyKey,
            packRun.CorrelationId,
            packRun.ProjectId,
            packRun.Metadata);
    }

    private static string ComputeDigest(string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        var hash = SHA256.HashData(bytes);
        return Convert.ToHexStringLower(hash);
    }

    private static JsonElement? ToPayload<T>(T value)
    {
        var json = JsonSerializer.SerializeToUtf8Bytes(value, new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase
        });
        var doc = JsonDocument.Parse(json);
        return doc.RootElement.Clone();
    }
}
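Side note on the digest used above (not part of the commit): ComputeDigest hashes the UTF-8 parameter payload with SHA-256 and returns lowercase hex, so a client that wants to pre-verify the parametersDigest returned by the retry endpoint can compute the same value. A minimal sketch; the helper name is chosen here for illustration only:

using System.Security.Cryptography;
using System.Text;

// Illustrative only: mirrors ComputeDigest (lowercase hex SHA-256 of the UTF-8 bytes).
static string Sha256HexLower(string content)
{
    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content));
    return Convert.ToHexString(hash).ToLowerInvariant();
}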
@@ -0,0 +1,18 @@
namespace StellaOps.Policy.Storage.Postgres.Models;

/// <summary>
/// Entity representing an audit log entry for the policy module.
/// </summary>
public sealed class PolicyAuditEntity
{
    public long Id { get; init; }
    public required string TenantId { get; init; }
    public Guid? UserId { get; init; }
    public required string Action { get; init; }
    public required string ResourceType { get; init; }
    public string? ResourceId { get; init; }
    public string? OldValue { get; init; }
    public string? NewValue { get; init; }
    public string? CorrelationId { get; init; }
    public DateTimeOffset CreatedAt { get; init; }
}
@@ -0,0 +1,162 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.Policy.Storage.Postgres.Models;

namespace StellaOps.Policy.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL repository for explanation operations.
/// </summary>
public sealed class ExplanationRepository : RepositoryBase<PolicyDataSource>, IExplanationRepository
{
    public ExplanationRepository(PolicyDataSource dataSource, ILogger<ExplanationRepository> logger)
        : base(dataSource, logger) { }

    public async Task<ExplanationEntity?> GetByIdAsync(Guid id, CancellationToken cancellationToken = default)
    {
        const string sql = """
            SELECT id, evaluation_run_id, rule_id, rule_name, result, severity, message, details, remediation, resource_path, line_number, created_at
            FROM policy.explanations WHERE id = @id
            """;
        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "id", id);
        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        return await reader.ReadAsync(cancellationToken).ConfigureAwait(false) ? MapExplanation(reader) : null;
    }

    public async Task<IReadOnlyList<ExplanationEntity>> GetByEvaluationRunIdAsync(Guid evaluationRunId, CancellationToken cancellationToken = default)
    {
        const string sql = """
            SELECT id, evaluation_run_id, rule_id, rule_name, result, severity, message, details, remediation, resource_path, line_number, created_at
            FROM policy.explanations WHERE evaluation_run_id = @evaluation_run_id
            ORDER BY created_at
            """;
        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "evaluation_run_id", evaluationRunId);
        var results = new List<ExplanationEntity>();
        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
            results.Add(MapExplanation(reader));
        return results;
    }

    public async Task<IReadOnlyList<ExplanationEntity>> GetByEvaluationRunIdAndResultAsync(Guid evaluationRunId, RuleResult result, CancellationToken cancellationToken = default)
    {
        const string sql = """
            SELECT id, evaluation_run_id, rule_id, rule_name, result, severity, message, details, remediation, resource_path, line_number, created_at
            FROM policy.explanations WHERE evaluation_run_id = @evaluation_run_id AND result = @result
            ORDER BY severity DESC, created_at
            """;
        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "evaluation_run_id", evaluationRunId);
        AddParameter(command, "result", ResultToString(result));
        var results = new List<ExplanationEntity>();
        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
            results.Add(MapExplanation(reader));
        return results;
    }

    public async Task<ExplanationEntity> CreateAsync(ExplanationEntity explanation, CancellationToken cancellationToken = default)
    {
        const string sql = """
            INSERT INTO policy.explanations (id, evaluation_run_id, rule_id, rule_name, result, severity, message, details, remediation, resource_path, line_number)
            VALUES (@id, @evaluation_run_id, @rule_id, @rule_name, @result, @severity, @message, @details::jsonb, @remediation, @resource_path, @line_number)
            RETURNING *
            """;
        var id = explanation.Id == Guid.Empty ? Guid.NewGuid() : explanation.Id;
        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "id", id);
        AddParameter(command, "evaluation_run_id", explanation.EvaluationRunId);
        AddParameter(command, "rule_id", explanation.RuleId);
        AddParameter(command, "rule_name", explanation.RuleName);
        AddParameter(command, "result", ResultToString(explanation.Result));
        AddParameter(command, "severity", explanation.Severity);
        AddParameter(command, "message", explanation.Message);
        AddJsonbParameter(command, "details", explanation.Details);
        AddParameter(command, "remediation", explanation.Remediation);
        AddParameter(command, "resource_path", explanation.ResourcePath);
        AddParameter(command, "line_number", explanation.LineNumber);

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
        return MapExplanation(reader);
    }

    public async Task<int> CreateBatchAsync(IEnumerable<ExplanationEntity> explanations, CancellationToken cancellationToken = default)
    {
        const string sql = """
            INSERT INTO policy.explanations (id, evaluation_run_id, rule_id, rule_name, result, severity, message, details, remediation, resource_path, line_number)
            VALUES (@id, @evaluation_run_id, @rule_id, @rule_name, @result, @severity, @message, @details::jsonb, @remediation, @resource_path, @line_number)
            """;
        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        var count = 0;
        foreach (var explanation in explanations)
        {
            await using var command = CreateCommand(sql, connection);
            var id = explanation.Id == Guid.Empty ? Guid.NewGuid() : explanation.Id;
            AddParameter(command, "id", id);
            AddParameter(command, "evaluation_run_id", explanation.EvaluationRunId);
            AddParameter(command, "rule_id", explanation.RuleId);
            AddParameter(command, "rule_name", explanation.RuleName);
            AddParameter(command, "result", ResultToString(explanation.Result));
            AddParameter(command, "severity", explanation.Severity);
            AddParameter(command, "message", explanation.Message);
            AddJsonbParameter(command, "details", explanation.Details);
            AddParameter(command, "remediation", explanation.Remediation);
            AddParameter(command, "resource_path", explanation.ResourcePath);
            AddParameter(command, "line_number", explanation.LineNumber);
            count += await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        }
        return count;
    }

    public async Task<bool> DeleteByEvaluationRunIdAsync(Guid evaluationRunId, CancellationToken cancellationToken = default)
    {
        const string sql = "DELETE FROM policy.explanations WHERE evaluation_run_id = @evaluation_run_id";
        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "evaluation_run_id", evaluationRunId);
        var rows = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        return rows > 0;
    }

    private static ExplanationEntity MapExplanation(NpgsqlDataReader reader) => new()
    {
        Id = reader.GetGuid(0),
        EvaluationRunId = reader.GetGuid(1),
        RuleId = GetNullableGuid(reader, 2),
        RuleName = reader.GetString(3),
        Result = ParseResult(reader.GetString(4)),
        Severity = reader.GetString(5),
        Message = GetNullableString(reader, 6),
        Details = reader.GetString(7),
        Remediation = GetNullableString(reader, 8),
        ResourcePath = GetNullableString(reader, 9),
        LineNumber = reader.IsDBNull(10) ? null : reader.GetInt32(10),
        CreatedAt = reader.GetFieldValue<DateTimeOffset>(11)
    };

    private static string ResultToString(RuleResult result) => result switch
    {
        RuleResult.Pass => "pass",
        RuleResult.Fail => "fail",
        RuleResult.Skip => "skip",
        RuleResult.Error => "error",
        _ => throw new ArgumentException($"Unknown result: {result}")
    };

    private static RuleResult ParseResult(string result) => result switch
    {
        "pass" => RuleResult.Pass,
        "fail" => RuleResult.Fail,
        "skip" => RuleResult.Skip,
        "error" => RuleResult.Error,
        _ => throw new ArgumentException($"Unknown result: {result}")
    };
}
@@ -0,0 +1,16 @@
using StellaOps.Policy.Storage.Postgres.Models;

namespace StellaOps.Policy.Storage.Postgres.Repositories;

/// <summary>
/// Repository interface for explanation operations.
/// </summary>
public interface IExplanationRepository
{
    Task<ExplanationEntity?> GetByIdAsync(Guid id, CancellationToken cancellationToken = default);
    Task<IReadOnlyList<ExplanationEntity>> GetByEvaluationRunIdAsync(Guid evaluationRunId, CancellationToken cancellationToken = default);
    Task<IReadOnlyList<ExplanationEntity>> GetByEvaluationRunIdAndResultAsync(Guid evaluationRunId, RuleResult result, CancellationToken cancellationToken = default);
    Task<ExplanationEntity> CreateAsync(ExplanationEntity explanation, CancellationToken cancellationToken = default);
    Task<int> CreateBatchAsync(IEnumerable<ExplanationEntity> explanations, CancellationToken cancellationToken = default);
    Task<bool> DeleteByEvaluationRunIdAsync(Guid evaluationRunId, CancellationToken cancellationToken = default);
}
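A minimal usage sketch for this interface (illustrative, not part of the commit; the writer class and its wiring are assumptions): an evaluation service can batch-insert its rule outcomes and then read back only the failing rules, which GetByEvaluationRunIdAndResultAsync returns ordered by the severity column.

// Hypothetical consumer; IExplanationRepository is resolved through DI (see the registrations below).
public sealed class ExplanationWriter(IExplanationRepository explanations)
{
    public async Task<IReadOnlyList<ExplanationEntity>> SaveAndListFailuresAsync(
        Guid evaluationRunId, IReadOnlyList<ExplanationEntity> items, CancellationToken ct)
    {
        // Persist every explanation for the run, then fetch only the failures.
        await explanations.CreateBatchAsync(items, ct);
        return await explanations.GetByEvaluationRunIdAndResultAsync(evaluationRunId, RuleResult.Fail, ct);
    }
}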
@@ -0,0 +1,15 @@
using StellaOps.Policy.Storage.Postgres.Models;

namespace StellaOps.Policy.Storage.Postgres.Repositories;

/// <summary>
/// Repository interface for policy audit operations.
/// </summary>
public interface IPolicyAuditRepository
{
    Task<long> CreateAsync(PolicyAuditEntity audit, CancellationToken cancellationToken = default);
    Task<IReadOnlyList<PolicyAuditEntity>> ListAsync(string tenantId, int limit = 100, int offset = 0, CancellationToken cancellationToken = default);
    Task<IReadOnlyList<PolicyAuditEntity>> GetByResourceAsync(string tenantId, string resourceType, string? resourceId = null, int limit = 100, CancellationToken cancellationToken = default);
    Task<IReadOnlyList<PolicyAuditEntity>> GetByCorrelationIdAsync(string tenantId, string correlationId, CancellationToken cancellationToken = default);
    Task<int> DeleteOldAsync(DateTimeOffset cutoff, CancellationToken cancellationToken = default);
}
@@ -0,0 +1,105 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.Policy.Storage.Postgres.Models;

namespace StellaOps.Policy.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL repository for policy audit operations.
/// </summary>
public sealed class PolicyAuditRepository : RepositoryBase<PolicyDataSource>, IPolicyAuditRepository
{
    public PolicyAuditRepository(PolicyDataSource dataSource, ILogger<PolicyAuditRepository> logger)
        : base(dataSource, logger) { }

    public async Task<long> CreateAsync(PolicyAuditEntity audit, CancellationToken cancellationToken = default)
    {
        const string sql = """
            INSERT INTO policy.audit (tenant_id, user_id, action, resource_type, resource_id, old_value, new_value, correlation_id)
            VALUES (@tenant_id, @user_id, @action, @resource_type, @resource_id, @old_value::jsonb, @new_value::jsonb, @correlation_id)
            RETURNING id
            """;
        await using var connection = await DataSource.OpenConnectionAsync(audit.TenantId, "writer", cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "tenant_id", audit.TenantId);
        AddParameter(command, "user_id", audit.UserId);
        AddParameter(command, "action", audit.Action);
        AddParameter(command, "resource_type", audit.ResourceType);
        AddParameter(command, "resource_id", audit.ResourceId);
        AddJsonbParameter(command, "old_value", audit.OldValue);
        AddJsonbParameter(command, "new_value", audit.NewValue);
        AddParameter(command, "correlation_id", audit.CorrelationId);

        var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
        return (long)result!;
    }

    public async Task<IReadOnlyList<PolicyAuditEntity>> ListAsync(string tenantId, int limit = 100, int offset = 0, CancellationToken cancellationToken = default)
    {
        const string sql = """
            SELECT id, tenant_id, user_id, action, resource_type, resource_id, old_value, new_value, correlation_id, created_at
            FROM policy.audit WHERE tenant_id = @tenant_id
            ORDER BY created_at DESC LIMIT @limit OFFSET @offset
            """;
        return await QueryAsync(tenantId, sql, cmd =>
        {
            AddParameter(cmd, "tenant_id", tenantId);
            AddParameter(cmd, "limit", limit);
            AddParameter(cmd, "offset", offset);
        }, MapAudit, cancellationToken).ConfigureAwait(false);
    }

    public async Task<IReadOnlyList<PolicyAuditEntity>> GetByResourceAsync(string tenantId, string resourceType, string? resourceId = null, int limit = 100, CancellationToken cancellationToken = default)
    {
        var sql = """
            SELECT id, tenant_id, user_id, action, resource_type, resource_id, old_value, new_value, correlation_id, created_at
            FROM policy.audit WHERE tenant_id = @tenant_id AND resource_type = @resource_type
            """;
        if (resourceId != null) sql += " AND resource_id = @resource_id";
        sql += " ORDER BY created_at DESC LIMIT @limit";

        return await QueryAsync(tenantId, sql, cmd =>
        {
            AddParameter(cmd, "tenant_id", tenantId);
            AddParameter(cmd, "resource_type", resourceType);
            if (resourceId != null) AddParameter(cmd, "resource_id", resourceId);
            AddParameter(cmd, "limit", limit);
        }, MapAudit, cancellationToken).ConfigureAwait(false);
    }

    public async Task<IReadOnlyList<PolicyAuditEntity>> GetByCorrelationIdAsync(string tenantId, string correlationId, CancellationToken cancellationToken = default)
    {
        const string sql = """
            SELECT id, tenant_id, user_id, action, resource_type, resource_id, old_value, new_value, correlation_id, created_at
            FROM policy.audit WHERE tenant_id = @tenant_id AND correlation_id = @correlation_id
            ORDER BY created_at
            """;
        return await QueryAsync(tenantId, sql,
            cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "correlation_id", correlationId); },
            MapAudit, cancellationToken).ConfigureAwait(false);
    }

    public async Task<int> DeleteOldAsync(DateTimeOffset cutoff, CancellationToken cancellationToken = default)
    {
        const string sql = "DELETE FROM policy.audit WHERE created_at < @cutoff";
        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "cutoff", cutoff);
        return await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    private static PolicyAuditEntity MapAudit(NpgsqlDataReader reader) => new()
    {
        Id = reader.GetInt64(0),
        TenantId = reader.GetString(1),
        UserId = GetNullableGuid(reader, 2),
        Action = reader.GetString(3),
        ResourceType = reader.GetString(4),
        ResourceId = GetNullableString(reader, 5),
        OldValue = GetNullableString(reader, 6),
        NewValue = GetNullableString(reader, 7),
        CorrelationId = GetNullableString(reader, 8),
        CreatedAt = reader.GetFieldValue<DateTimeOffset>(9)
    };
}
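As a rough sketch of how the audit repository is meant to be used (illustrative only, not part of the commit; the tenant value, pack id, and the resolved auditRepository variable are placeholders): write one entry per action with a shared correlation id, then pull the whole trail back with GetByCorrelationIdAsync.

// Illustrative only; auditRepository is an IPolicyAuditRepository obtained from DI.
var tenantId = "tenant-1";
var correlationId = Guid.NewGuid().ToString();
await auditRepository.CreateAsync(new PolicyAuditEntity
{
    TenantId = tenantId,
    Action = "pack.created",
    ResourceType = "pack",
    ResourceId = Guid.NewGuid().ToString(),
    CorrelationId = correlationId
});
var trail = await auditRepository.GetByCorrelationIdAsync(tenantId, correlationId);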
@@ -35,6 +35,8 @@ public static class ServiceCollectionExtensions
        services.AddScoped<IEvaluationRunRepository, EvaluationRunRepository>();
        services.AddScoped<IExceptionRepository, ExceptionRepository>();
        services.AddScoped<IReceiptRepository, PostgresReceiptRepository>();
        services.AddScoped<IExplanationRepository, ExplanationRepository>();
        services.AddScoped<IPolicyAuditRepository, PolicyAuditRepository>();

        return services;
    }
@@ -60,6 +62,8 @@ public static class ServiceCollectionExtensions
        services.AddScoped<IEvaluationRunRepository, EvaluationRunRepository>();
        services.AddScoped<IExceptionRepository, ExceptionRepository>();
        services.AddScoped<IReceiptRepository, PostgresReceiptRepository>();
        services.AddScoped<IExplanationRepository, ExplanationRepository>();
        services.AddScoped<IPolicyAuditRepository, PolicyAuditRepository>();

        return services;
    }
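Both registration paths add the two new repositories with scoped lifetime, alongside the existing policy repositories. A minimal resolution sketch (host setup elided; serviceProvider is assumed to come from the application's DI container):

// Illustrative only: resolving the newly registered repositories from a scope.
using var scope = serviceProvider.CreateScope();
var explanations = scope.ServiceProvider.GetRequiredService<IExplanationRepository>();
var audits = scope.ServiceProvider.GetRequiredService<IPolicyAuditRepository>();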
@@ -0,0 +1,250 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Storage.Postgres.Models;
using StellaOps.Policy.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Policy.Storage.Postgres.Tests;

[Collection(PolicyPostgresCollection.Name)]
public sealed class EvaluationRunRepositoryTests : IAsyncLifetime
{
    private readonly PolicyPostgresFixture _fixture;
    private readonly EvaluationRunRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public EvaluationRunRepositoryTests(PolicyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
        _repository = new EvaluationRunRepository(dataSource, NullLogger<EvaluationRunRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetById_RoundTripsEvaluationRun()
    {
        // Arrange
        var run = new EvaluationRunEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            ProjectId = "project-123",
            ArtifactId = "registry.example.com/app:v1.0",
            PackId = Guid.NewGuid(),
            PackVersion = 1,
            Status = EvaluationStatus.Pending
        };

        // Act
        await _repository.CreateAsync(run);
        var fetched = await _repository.GetByIdAsync(_tenantId, run.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(run.Id);
        fetched.ProjectId.Should().Be("project-123");
        fetched.Status.Should().Be(EvaluationStatus.Pending);
    }

    [Fact]
    public async Task GetByProjectId_ReturnsProjectEvaluations()
    {
        // Arrange
        var run = CreateRun("project-abc");
        await _repository.CreateAsync(run);

        // Act
        var runs = await _repository.GetByProjectIdAsync(_tenantId, "project-abc");

        // Assert
        runs.Should().HaveCount(1);
        runs[0].ProjectId.Should().Be("project-abc");
    }

    [Fact]
    public async Task GetByArtifactId_ReturnsArtifactEvaluations()
    {
        // Arrange
        var artifactId = "registry.example.com/app:v2.0";
        var run = new EvaluationRunEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            ArtifactId = artifactId,
            Status = EvaluationStatus.Pending
        };
        await _repository.CreateAsync(run);

        // Act
        var runs = await _repository.GetByArtifactIdAsync(_tenantId, artifactId);

        // Assert
        runs.Should().HaveCount(1);
        runs[0].ArtifactId.Should().Be(artifactId);
    }

    [Fact]
    public async Task GetByStatus_ReturnsRunsWithStatus()
    {
        // Arrange
        var pendingRun = CreateRun("project-1");
        var completedRun = new EvaluationRunEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            ProjectId = "project-2",
            Status = EvaluationStatus.Completed,
            Result = EvaluationResult.Pass
        };
        await _repository.CreateAsync(pendingRun);
        await _repository.CreateAsync(completedRun);

        // Act
        var pendingRuns = await _repository.GetByStatusAsync(_tenantId, EvaluationStatus.Pending);

        // Assert
        pendingRuns.Should().HaveCount(1);
        pendingRuns[0].ProjectId.Should().Be("project-1");
    }

    [Fact]
    public async Task GetRecent_ReturnsRecentEvaluations()
    {
        // Arrange
        await _repository.CreateAsync(CreateRun("project-1"));
        await _repository.CreateAsync(CreateRun("project-2"));

        // Act
        var recentRuns = await _repository.GetRecentAsync(_tenantId, limit: 10);

        // Assert
        recentRuns.Should().HaveCount(2);
    }

    [Fact]
    public async Task MarkStarted_UpdatesStatusAndStartedAt()
    {
        // Arrange
        var run = CreateRun("project-start");
        await _repository.CreateAsync(run);

        // Act
        var result = await _repository.MarkStartedAsync(_tenantId, run.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, run.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Status.Should().Be(EvaluationStatus.Running);
        fetched.StartedAt.Should().NotBeNull();
    }

    [Fact]
    public async Task MarkCompleted_UpdatesAllCompletionFields()
    {
        // Arrange
        var run = CreateRun("project-complete");
        await _repository.CreateAsync(run);
        await _repository.MarkStartedAsync(_tenantId, run.Id);

        // Act
        var result = await _repository.MarkCompletedAsync(
            _tenantId,
            run.Id,
            EvaluationResult.Fail,
            score: 65.5m,
            findingsCount: 10,
            criticalCount: 2,
            highCount: 3,
            mediumCount: 4,
            lowCount: 1,
            durationMs: 1500);
        var fetched = await _repository.GetByIdAsync(_tenantId, run.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Status.Should().Be(EvaluationStatus.Completed);
        fetched.Result.Should().Be(EvaluationResult.Fail);
        fetched.Score.Should().Be(65.5m);
        fetched.FindingsCount.Should().Be(10);
        fetched.CriticalCount.Should().Be(2);
        fetched.DurationMs.Should().Be(1500);
        fetched.CompletedAt.Should().NotBeNull();
    }

    [Fact]
    public async Task MarkFailed_SetsErrorMessage()
    {
        // Arrange
        var run = CreateRun("project-fail");
        await _repository.CreateAsync(run);

        // Act
        var result = await _repository.MarkFailedAsync(_tenantId, run.Id, "Policy engine timeout");
        var fetched = await _repository.GetByIdAsync(_tenantId, run.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Status.Should().Be(EvaluationStatus.Failed);
        fetched.Result.Should().Be(EvaluationResult.Error);
        fetched.ErrorMessage.Should().Be("Policy engine timeout");
    }

    [Fact]
    public async Task GetStats_ReturnsCorrectStatistics()
    {
        // Arrange
        var passedRun = new EvaluationRunEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Status = EvaluationStatus.Completed,
            Result = EvaluationResult.Pass,
            Score = 100,
            FindingsCount = 0,
            CriticalCount = 0,
            HighCount = 0
        };
        var failedRun = new EvaluationRunEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Status = EvaluationStatus.Completed,
            Result = EvaluationResult.Fail,
            Score = 50,
            FindingsCount = 5,
            CriticalCount = 1,
            HighCount = 2
        };
        await _repository.CreateAsync(passedRun);
        await _repository.CreateAsync(failedRun);

        var from = DateTimeOffset.UtcNow.AddHours(-1);
        var to = DateTimeOffset.UtcNow.AddHours(1);

        // Act
        var stats = await _repository.GetStatsAsync(_tenantId, from, to);

        // Assert
        stats.Total.Should().Be(2);
        stats.Passed.Should().Be(1);
        stats.Failed.Should().Be(1);
        stats.TotalFindings.Should().Be(5);
        stats.CriticalFindings.Should().Be(1);
        stats.HighFindings.Should().Be(2);
    }

    private EvaluationRunEntity CreateRun(string projectId) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        ProjectId = projectId,
        Status = EvaluationStatus.Pending
    };
}
@@ -0,0 +1,278 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Storage.Postgres.Models;
using StellaOps.Policy.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Policy.Storage.Postgres.Tests;

[Collection(PolicyPostgresCollection.Name)]
public sealed class ExceptionRepositoryTests : IAsyncLifetime
{
    private readonly PolicyPostgresFixture _fixture;
    private readonly ExceptionRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public ExceptionRepositoryTests(PolicyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
        _repository = new ExceptionRepository(dataSource, NullLogger<ExceptionRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetById_RoundTripsException()
    {
        // Arrange
        var exception = new ExceptionEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "legacy-root-container",
            Description = "Allow root containers for legacy app",
            RulePattern = "no-root-containers",
            ProjectId = "project-legacy",
            Reason = "Legacy application requires root access",
            Status = ExceptionStatus.Active,
            ExpiresAt = DateTimeOffset.UtcNow.AddDays(30)
        };

        // Act
        await _repository.CreateAsync(exception);
        var fetched = await _repository.GetByIdAsync(_tenantId, exception.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(exception.Id);
        fetched.Name.Should().Be("legacy-root-container");
        fetched.Status.Should().Be(ExceptionStatus.Active);
    }

    [Fact]
    public async Task GetByName_ReturnsCorrectException()
    {
        // Arrange
        var exception = CreateException("temp-waiver");
        await _repository.CreateAsync(exception);

        // Act
        var fetched = await _repository.GetByNameAsync(_tenantId, "temp-waiver");

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(exception.Id);
    }

    [Fact]
    public async Task GetAll_ReturnsAllExceptionsForTenant()
    {
        // Arrange
        var exception1 = CreateException("exception1");
        var exception2 = CreateException("exception2");
        await _repository.CreateAsync(exception1);
        await _repository.CreateAsync(exception2);

        // Act
        var exceptions = await _repository.GetAllAsync(_tenantId);

        // Assert
        exceptions.Should().HaveCount(2);
        exceptions.Select(e => e.Name).Should().Contain(["exception1", "exception2"]);
    }

    [Fact]
    public async Task GetAll_FiltersByStatus()
    {
        // Arrange
        var activeException = CreateException("active");
        var revokedException = new ExceptionEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "revoked",
            Reason = "Test",
            Status = ExceptionStatus.Revoked
        };
        await _repository.CreateAsync(activeException);
        await _repository.CreateAsync(revokedException);

        // Act
        var activeExceptions = await _repository.GetAllAsync(_tenantId, status: ExceptionStatus.Active);

        // Assert
        activeExceptions.Should().HaveCount(1);
        activeExceptions[0].Name.Should().Be("active");
    }

    [Fact]
    public async Task GetActiveForProject_ReturnsProjectExceptions()
    {
        // Arrange
        var projectException = new ExceptionEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "project-exception",
            ProjectId = "project-123",
            Reason = "Project-specific waiver",
            Status = ExceptionStatus.Active
        };
        var otherProjectException = new ExceptionEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "other-exception",
            ProjectId = "project-456",
            Reason = "Other project waiver",
            Status = ExceptionStatus.Active
        };
        await _repository.CreateAsync(projectException);
        await _repository.CreateAsync(otherProjectException);

        // Act
        var exceptions = await _repository.GetActiveForProjectAsync(_tenantId, "project-123");

        // Assert
        exceptions.Should().HaveCount(1);
        exceptions[0].Name.Should().Be("project-exception");
    }

    [Fact]
    public async Task GetActiveForRule_ReturnsRuleExceptions()
    {
        // Arrange
        var ruleException = new ExceptionEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "rule-exception",
            RulePattern = "no-root-containers",
            Reason = "Rule-specific waiver",
            Status = ExceptionStatus.Active
        };
        await _repository.CreateAsync(ruleException);

        // Act
        var exceptions = await _repository.GetActiveForRuleAsync(_tenantId, "no-root-containers");

        // Assert
        exceptions.Should().HaveCount(1);
        exceptions[0].Name.Should().Be("rule-exception");
    }

    [Fact]
    public async Task Update_ModifiesException()
    {
        // Arrange
        var exception = CreateException("update-test");
        await _repository.CreateAsync(exception);

        // Act
        var updated = new ExceptionEntity
        {
            Id = exception.Id,
            TenantId = _tenantId,
            Name = "update-test",
            Reason = "Updated reason",
            Description = "Updated description"
        };
        var result = await _repository.UpdateAsync(updated);
        var fetched = await _repository.GetByIdAsync(_tenantId, exception.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Reason.Should().Be("Updated reason");
        fetched.Description.Should().Be("Updated description");
    }

    [Fact]
    public async Task Approve_SetsApprovalDetails()
    {
        // Arrange
        var exception = CreateException("approve-test");
        await _repository.CreateAsync(exception);

        // Act
        var result = await _repository.ApproveAsync(_tenantId, exception.Id, "admin@example.com");
        var fetched = await _repository.GetByIdAsync(_tenantId, exception.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.ApprovedBy.Should().Be("admin@example.com");
        fetched.ApprovedAt.Should().NotBeNull();
    }

    [Fact]
    public async Task Revoke_SetsRevokedStatusAndDetails()
    {
        // Arrange
        var exception = CreateException("revoke-test");
        await _repository.CreateAsync(exception);

        // Act
        var result = await _repository.RevokeAsync(_tenantId, exception.Id, "admin@example.com");
        var fetched = await _repository.GetByIdAsync(_tenantId, exception.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.Status.Should().Be(ExceptionStatus.Revoked);
        fetched.RevokedBy.Should().Be("admin@example.com");
        fetched.RevokedAt.Should().NotBeNull();
    }

    [Fact]
    public async Task Expire_ExpiresOldExceptions()
    {
        // Arrange - Create an exception that expires in the past
        var expiredException = new ExceptionEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "expired",
            Reason = "Test",
            Status = ExceptionStatus.Active,
            ExpiresAt = DateTimeOffset.UtcNow.AddDays(-1)
        };
        await _repository.CreateAsync(expiredException);

        // Act
        var count = await _repository.ExpireAsync(_tenantId);
        var fetched = await _repository.GetByIdAsync(_tenantId, expiredException.Id);

        // Assert
        count.Should().Be(1);
        fetched!.Status.Should().Be(ExceptionStatus.Expired);
    }

    [Fact]
    public async Task Delete_RemovesException()
    {
        // Arrange
        var exception = CreateException("delete-test");
        await _repository.CreateAsync(exception);

        // Act
        var result = await _repository.DeleteAsync(_tenantId, exception.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, exception.Id);

        // Assert
        result.Should().BeTrue();
        fetched.Should().BeNull();
    }

    private ExceptionEntity CreateException(string name) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        Name = name,
        Reason = "Test exception",
        Status = ExceptionStatus.Active
    };
}
@@ -0,0 +1,213 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Storage.Postgres.Models;
using StellaOps.Policy.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Policy.Storage.Postgres.Tests;

[Collection(PolicyPostgresCollection.Name)]
public sealed class PackRepositoryTests : IAsyncLifetime
{
    private readonly PolicyPostgresFixture _fixture;
    private readonly PackRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public PackRepositoryTests(PolicyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
        _repository = new PackRepository(dataSource, NullLogger<PackRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetById_RoundTripsPack()
    {
        // Arrange
        var pack = new PackEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "security-baseline",
            DisplayName = "Security Baseline Pack",
            Description = "Core security policy rules",
            IsBuiltin = false
        };

        // Act
        await _repository.CreateAsync(pack);
        var fetched = await _repository.GetByIdAsync(_tenantId, pack.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(pack.Id);
        fetched.Name.Should().Be("security-baseline");
        fetched.DisplayName.Should().Be("Security Baseline Pack");
    }

    [Fact]
    public async Task GetByName_ReturnsCorrectPack()
    {
        // Arrange
        var pack = CreatePack("compliance-pack");
        await _repository.CreateAsync(pack);

        // Act
        var fetched = await _repository.GetByNameAsync(_tenantId, "compliance-pack");

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(pack.Id);
    }

    [Fact]
    public async Task GetAll_ReturnsAllPacksForTenant()
    {
        // Arrange
        var pack1 = CreatePack("pack1");
        var pack2 = CreatePack("pack2");
        await _repository.CreateAsync(pack1);
        await _repository.CreateAsync(pack2);

        // Act
        var packs = await _repository.GetAllAsync(_tenantId);

        // Assert
        packs.Should().HaveCount(2);
        packs.Select(p => p.Name).Should().Contain(["pack1", "pack2"]);
    }

    [Fact]
    public async Task GetAll_ExcludesDeprecated()
    {
        // Arrange
        var activePack = CreatePack("active");
        var deprecatedPack = new PackEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "deprecated",
            IsDeprecated = true
        };
        await _repository.CreateAsync(activePack);
        await _repository.CreateAsync(deprecatedPack);

        // Act
        var packs = await _repository.GetAllAsync(_tenantId, includeDeprecated: false);

        // Assert
        packs.Should().HaveCount(1);
        packs[0].Name.Should().Be("active");
    }

    [Fact]
    public async Task GetBuiltin_ReturnsOnlyBuiltinPacks()
    {
        // Arrange
        var builtinPack = new PackEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "builtin",
            IsBuiltin = true
        };
        var customPack = CreatePack("custom");
        await _repository.CreateAsync(builtinPack);
        await _repository.CreateAsync(customPack);

        // Act
        var builtinPacks = await _repository.GetBuiltinAsync(_tenantId);

        // Assert
        builtinPacks.Should().HaveCount(1);
        builtinPacks[0].Name.Should().Be("builtin");
    }

    [Fact]
    public async Task Update_ModifiesPack()
    {
        // Arrange
        var pack = CreatePack("update-test");
        await _repository.CreateAsync(pack);

        // Act
        var updated = new PackEntity
        {
            Id = pack.Id,
            TenantId = _tenantId,
            Name = "update-test",
            DisplayName = "Updated Display Name",
            Description = "Updated description"
        };
        var result = await _repository.UpdateAsync(updated);
        var fetched = await _repository.GetByIdAsync(_tenantId, pack.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.DisplayName.Should().Be("Updated Display Name");
        fetched.Description.Should().Be("Updated description");
    }

    [Fact]
    public async Task SetActiveVersion_UpdatesActiveVersion()
    {
        // Arrange
        var pack = CreatePack("version-test");
        await _repository.CreateAsync(pack);

        // Act
        var result = await _repository.SetActiveVersionAsync(_tenantId, pack.Id, 2);
        var fetched = await _repository.GetByIdAsync(_tenantId, pack.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.ActiveVersion.Should().Be(2);
    }

    [Fact]
    public async Task Deprecate_MarksPackAsDeprecated()
    {
        // Arrange
        var pack = CreatePack("deprecate-test");
        await _repository.CreateAsync(pack);

        // Act
        var result = await _repository.DeprecateAsync(_tenantId, pack.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, pack.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.IsDeprecated.Should().BeTrue();
    }

    [Fact]
    public async Task Delete_RemovesPack()
    {
        // Arrange
        var pack = CreatePack("delete-test");
        await _repository.CreateAsync(pack);

        // Act
        var result = await _repository.DeleteAsync(_tenantId, pack.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, pack.Id);

        // Assert
        result.Should().BeTrue();
        fetched.Should().BeNull();
    }

    private PackEntity CreatePack(string name) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        Name = name,
        IsBuiltin = false
    };
}
@@ -0,0 +1,191 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Storage.Postgres.Models;
using StellaOps.Policy.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Policy.Storage.Postgres.Tests;

[Collection(PolicyPostgresCollection.Name)]
public sealed class PolicyAuditRepositoryTests : IAsyncLifetime
{
    private readonly PolicyPostgresFixture _fixture;
    private readonly PolicyAuditRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public PolicyAuditRepositoryTests(PolicyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
        _repository = new PolicyAuditRepository(dataSource, NullLogger<PolicyAuditRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task Create_ReturnsGeneratedId()
    {
        // Arrange
        var audit = new PolicyAuditEntity
        {
            TenantId = _tenantId,
            UserId = Guid.NewGuid(),
            Action = "pack.created",
            ResourceType = "pack",
            ResourceId = Guid.NewGuid().ToString()
        };

        // Act
        var id = await _repository.CreateAsync(audit);

        // Assert
        id.Should().BeGreaterThan(0);
    }

    [Fact]
    public async Task List_ReturnsAuditEntriesOrderedByCreatedAtDesc()
    {
        // Arrange
        var audit1 = CreateAudit("action1");
        var audit2 = CreateAudit("action2");
        await _repository.CreateAsync(audit1);
        await Task.Delay(10);
        await _repository.CreateAsync(audit2);

        // Act
        var audits = await _repository.ListAsync(_tenantId, limit: 10);

        // Assert
        audits.Should().HaveCount(2);
        audits[0].Action.Should().Be("action2"); // Most recent first
    }

    [Fact]
    public async Task GetByResource_ReturnsResourceAudits()
    {
        // Arrange
        var resourceId = Guid.NewGuid().ToString();
        var audit = new PolicyAuditEntity
        {
            TenantId = _tenantId,
            Action = "exception.updated",
            ResourceType = "exception",
            ResourceId = resourceId
        };
        await _repository.CreateAsync(audit);

        // Act
        var audits = await _repository.GetByResourceAsync(_tenantId, "exception", resourceId);

        // Assert
        audits.Should().HaveCount(1);
        audits[0].ResourceId.Should().Be(resourceId);
    }

    [Fact]
    public async Task GetByResource_WithoutResourceId_ReturnsAllOfType()
    {
        // Arrange
        await _repository.CreateAsync(new PolicyAuditEntity
        {
            TenantId = _tenantId,
            Action = "pack.created",
            ResourceType = "pack",
            ResourceId = Guid.NewGuid().ToString()
        });
        await _repository.CreateAsync(new PolicyAuditEntity
        {
            TenantId = _tenantId,
            Action = "pack.updated",
            ResourceType = "pack",
            ResourceId = Guid.NewGuid().ToString()
        });

        // Act
        var audits = await _repository.GetByResourceAsync(_tenantId, "pack");

        // Assert
        audits.Should().HaveCount(2);
    }

    [Fact]
    public async Task GetByCorrelationId_ReturnsCorrelatedAudits()
    {
        // Arrange
        var correlationId = Guid.NewGuid().ToString();
        var audit1 = new PolicyAuditEntity
        {
            TenantId = _tenantId,
            Action = "evaluation.started",
            ResourceType = "evaluation",
            CorrelationId = correlationId
        };
        var audit2 = new PolicyAuditEntity
        {
            TenantId = _tenantId,
            Action = "evaluation.completed",
            ResourceType = "evaluation",
            CorrelationId = correlationId
        };
        await _repository.CreateAsync(audit1);
        await _repository.CreateAsync(audit2);

        // Act
        var audits = await _repository.GetByCorrelationIdAsync(_tenantId, correlationId);

        // Assert
        audits.Should().HaveCount(2);
        audits.Should().AllSatisfy(a => a.CorrelationId.Should().Be(correlationId));
    }

    [Fact]
    public async Task Create_StoresJsonbValues()
    {
        // Arrange
        var audit = new PolicyAuditEntity
        {
            TenantId = _tenantId,
            Action = "profile.updated",
            ResourceType = "risk_profile",
            OldValue = "{\"threshold\": 7.0}",
            NewValue = "{\"threshold\": 8.0}"
        };

        // Act
        await _repository.CreateAsync(audit);
        var audits = await _repository.GetByResourceAsync(_tenantId, "risk_profile");

        // Assert
        audits.Should().HaveCount(1);
        audits[0].OldValue.Should().Contain("7.0");
        audits[0].NewValue.Should().Contain("8.0");
    }

    [Fact]
    public async Task DeleteOld_RemovesOldAudits()
    {
        // Arrange
        await _repository.CreateAsync(CreateAudit("old-action"));

        // Act - Delete audits older than a cutoff in the future, which removes the entry just created
        var cutoff = DateTimeOffset.UtcNow.AddMinutes(1);
        var count = await _repository.DeleteOldAsync(cutoff);

        // Assert
        count.Should().Be(1);
    }

    private PolicyAuditEntity CreateAudit(string action) => new()
    {
        TenantId = _tenantId,
        UserId = Guid.NewGuid(),
        Action = action,
        ResourceType = "test",
        ResourceId = Guid.NewGuid().ToString()
    };
}
@@ -0,0 +1,274 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Storage.Postgres.Models;
using StellaOps.Policy.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Policy.Storage.Postgres.Tests;

[Collection(PolicyPostgresCollection.Name)]
public sealed class RiskProfileRepositoryTests : IAsyncLifetime
{
    private readonly PolicyPostgresFixture _fixture;
    private readonly RiskProfileRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public RiskProfileRepositoryTests(PolicyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
        _repository = new RiskProfileRepository(dataSource, NullLogger<RiskProfileRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetById_RoundTripsRiskProfile()
    {
        // Arrange
        var profile = new RiskProfileEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "default",
            DisplayName = "Default Risk Profile",
            Description = "Standard risk scoring profile",
            Version = 1,
            IsActive = true,
            Thresholds = "{\"critical\": 9.0, \"high\": 7.0}",
            ScoringWeights = "{\"vulnerability\": 1.0, \"configuration\": 0.5}"
        };

        // Act
        await _repository.CreateAsync(profile);
        var fetched = await _repository.GetByIdAsync(_tenantId, profile.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(profile.Id);
        fetched.Name.Should().Be("default");
        fetched.Version.Should().Be(1);
        fetched.IsActive.Should().BeTrue();
    }

    [Fact]
    public async Task GetActiveByName_ReturnsActiveVersion()
    {
        // Arrange
        var inactiveProfile = new RiskProfileEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "versioned-profile",
            Version = 1,
            IsActive = false
        };
        var activeProfile = new RiskProfileEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "versioned-profile",
            Version = 2,
            IsActive = true
        };
        await _repository.CreateAsync(inactiveProfile);
        await _repository.CreateAsync(activeProfile);

        // Act
        var fetched = await _repository.GetActiveByNameAsync(_tenantId, "versioned-profile");

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Version.Should().Be(2);
        fetched.IsActive.Should().BeTrue();
    }

    [Fact]
    public async Task GetAll_ReturnsProfilesForTenant()
    {
        // Arrange
        var profile1 = CreateProfile("profile1");
        var profile2 = CreateProfile("profile2");
        await _repository.CreateAsync(profile1);
        await _repository.CreateAsync(profile2);

        // Act
        var profiles = await _repository.GetAllAsync(_tenantId);

        // Assert
        profiles.Should().HaveCount(2);
        profiles.Select(p => p.Name).Should().Contain(["profile1", "profile2"]);
    }

    [Fact]
    public async Task GetAll_FiltersActiveOnly()
    {
        // Arrange
        var activeProfile = CreateProfile("active");
        var inactiveProfile = new RiskProfileEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "inactive",
            IsActive = false
        };
        await _repository.CreateAsync(activeProfile);
        await _repository.CreateAsync(inactiveProfile);

        // Act
        var activeProfiles = await _repository.GetAllAsync(_tenantId, activeOnly: true);

        // Assert
        activeProfiles.Should().HaveCount(1);
        activeProfiles[0].Name.Should().Be("active");
    }

    [Fact]
    public async Task GetVersionsByName_ReturnsAllVersions()
    {
        // Arrange
        var v1 = new RiskProfileEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "multi-version",
            Version = 1,
            IsActive = false
        };
        var v2 = new RiskProfileEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "multi-version",
            Version = 2,
            IsActive = true
        };
        await _repository.CreateAsync(v1);
        await _repository.CreateAsync(v2);

        // Act
        var versions = await _repository.GetVersionsByNameAsync(_tenantId, "multi-version");

        // Assert
        versions.Should().HaveCount(2);
        versions.Select(v => v.Version).Should().Contain([1, 2]);
    }

    [Fact]
    public async Task Update_ModifiesProfile()
    {
        // Arrange
        var profile = CreateProfile("update-test");
        await _repository.CreateAsync(profile);

        // Act
        var updated = new RiskProfileEntity
        {
            Id = profile.Id,
            TenantId = _tenantId,
            Name = "update-test",
            DisplayName = "Updated Display Name",
            Description = "Updated description",
            Thresholds = "{\"critical\": 8.0}"
        };
        var result = await _repository.UpdateAsync(updated);
        var fetched = await _repository.GetByIdAsync(_tenantId, profile.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.DisplayName.Should().Be("Updated Display Name");
        fetched.Thresholds.Should().Contain("8.0");
    }

    [Fact]
    public async Task CreateVersion_CreatesNewVersion()
    {
        // Arrange
        var original = CreateProfile("version-create");
        await _repository.CreateAsync(original);

        // Act
        var newVersion = new RiskProfileEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "version-create",
            DisplayName = "New Version",
            Version = 2,
            IsActive = true
        };
        var created = await _repository.CreateVersionAsync(_tenantId, "version-create", newVersion);

        // Assert
        created.Should().NotBeNull();
        created.Version.Should().Be(2);
    }

    [Fact]
    public async Task Activate_SetsProfileAsActive()
    {
        // Arrange
        var profile = new RiskProfileEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "activate-test",
            IsActive = false
        };
        await _repository.CreateAsync(profile);

        // Act
        var result = await _repository.ActivateAsync(_tenantId, profile.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, profile.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.IsActive.Should().BeTrue();
    }

    [Fact]
    public async Task Deactivate_SetsProfileAsInactive()
    {
        // Arrange
        var profile = CreateProfile("deactivate-test");
        await _repository.CreateAsync(profile);

        // Act
        var result = await _repository.DeactivateAsync(_tenantId, profile.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, profile.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.IsActive.Should().BeFalse();
    }

    [Fact]
    public async Task Delete_RemovesProfile()
    {
        // Arrange
        var profile = CreateProfile("delete-test");
        await _repository.CreateAsync(profile);

        // Act
        var result = await _repository.DeleteAsync(_tenantId, profile.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, profile.Id);

        // Assert
        result.Should().BeTrue();
        fetched.Should().BeNull();
    }

    private RiskProfileEntity CreateProfile(string name) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        Name = name,
        Version = 1,
        IsActive = true
    };
}
@@ -0,0 +1,231 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Storage.Postgres.Models;
using StellaOps.Policy.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Policy.Storage.Postgres.Tests;

[Collection(PolicyPostgresCollection.Name)]
public sealed class RuleRepositoryTests : IAsyncLifetime
{
    private readonly PolicyPostgresFixture _fixture;
    private readonly RuleRepository _repository;
    private readonly Guid _packVersionId = Guid.NewGuid();

    public RuleRepositoryTests(PolicyPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
        _repository = new RuleRepository(dataSource, NullLogger<RuleRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetById_RoundTripsRule()
    {
        // Arrange
        var rule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            PackVersionId = _packVersionId,
            Name = "no-root-containers",
            Description = "Containers should not run as root",
            RuleType = RuleType.Rego,
            Content = "package container\ndefault allow = false",
            ContentHash = "abc123",
            Severity = RuleSeverity.High,
            Category = "security",
            Tags = ["container", "security"]
        };

        // Act
        await _repository.CreateAsync(rule);
        var fetched = await _repository.GetByIdAsync(rule.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(rule.Id);
        fetched.Name.Should().Be("no-root-containers");
        fetched.Severity.Should().Be(RuleSeverity.High);
    }

    [Fact]
    public async Task GetByName_ReturnsCorrectRule()
    {
        // Arrange
        var rule = CreateRule("required-labels");
        await _repository.CreateAsync(rule);

        // Act
        var fetched = await _repository.GetByNameAsync(_packVersionId, "required-labels");

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(rule.Id);
    }

    [Fact]
    public async Task CreateBatch_CreatesMultipleRules()
    {
        // Arrange
        var rules = new[]
        {
            CreateRule("rule1"),
            CreateRule("rule2"),
            CreateRule("rule3")
        };

        // Act
        var count = await _repository.CreateBatchAsync(rules);

        // Assert
        count.Should().Be(3);
    }

    [Fact]
    public async Task GetByPackVersionId_ReturnsAllRulesForVersion()
    {
        // Arrange
        var rule1 = CreateRule("rule1");
        var rule2 = CreateRule("rule2");
        await _repository.CreateAsync(rule1);
        await _repository.CreateAsync(rule2);

        // Act
        var rules = await _repository.GetByPackVersionIdAsync(_packVersionId);

        // Assert
        rules.Should().HaveCount(2);
        rules.Select(r => r.Name).Should().Contain(["rule1", "rule2"]);
    }

    [Fact]
    public async Task GetBySeverity_ReturnsRulesWithSeverity()
    {
        // Arrange
        var criticalRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            PackVersionId = _packVersionId,
            Name = "critical-rule",
            Content = "content",
            ContentHash = "hash",
            Severity = RuleSeverity.Critical
        };
        var lowRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            PackVersionId = _packVersionId,
            Name = "low-rule",
            Content = "content",
            ContentHash = "hash2",
            Severity = RuleSeverity.Low
        };
        await _repository.CreateAsync(criticalRule);
        await _repository.CreateAsync(lowRule);

        // Act
        var criticalRules = await _repository.GetBySeverityAsync(_packVersionId, RuleSeverity.Critical);

        // Assert
        criticalRules.Should().HaveCount(1);
        criticalRules[0].Name.Should().Be("critical-rule");
    }

    [Fact]
    public async Task GetByCategory_ReturnsRulesInCategory()
    {
        // Arrange
        var securityRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            PackVersionId = _packVersionId,
            Name = "security-rule",
            Content = "content",
            ContentHash = "hash",
            Category = "security"
        };
        var complianceRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            PackVersionId = _packVersionId,
            Name = "compliance-rule",
            Content = "content",
            ContentHash = "hash2",
            Category = "compliance"
        };
        await _repository.CreateAsync(securityRule);
        await _repository.CreateAsync(complianceRule);

        // Act
        var securityRules = await _repository.GetByCategoryAsync(_packVersionId, "security");

        // Assert
        securityRules.Should().HaveCount(1);
        securityRules[0].Name.Should().Be("security-rule");
    }

    [Fact]
    public async Task GetByTag_ReturnsRulesWithTag()
    {
        // Arrange
        var containerRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            PackVersionId = _packVersionId,
            Name = "container-rule",
            Content = "content",
            ContentHash = "hash",
            Tags = ["container", "docker"]
        };
        var networkRule = new RuleEntity
        {
            Id = Guid.NewGuid(),
            PackVersionId = _packVersionId,
            Name = "network-rule",
            Content = "content",
            ContentHash = "hash2",
            Tags = ["network"]
        };
        await _repository.CreateAsync(containerRule);
        await _repository.CreateAsync(networkRule);

        // Act
        var containerRules = await _repository.GetByTagAsync(_packVersionId, "container");

        // Assert
        containerRules.Should().HaveCount(1);
        containerRules[0].Name.Should().Be("container-rule");
    }

    [Fact]
    public async Task CountByPackVersionId_ReturnsCorrectCount()
    {
        // Arrange
        await _repository.CreateAsync(CreateRule("rule1"));
        await _repository.CreateAsync(CreateRule("rule2"));
        await _repository.CreateAsync(CreateRule("rule3"));

        // Act
        var count = await _repository.CountByPackVersionIdAsync(_packVersionId);

        // Assert
        count.Should().Be(3);
    }

    private RuleEntity CreateRule(string name) => new()
    {
        Id = Guid.NewGuid(),
        PackVersionId = _packVersionId,
        Name = name,
        Content = "package test",
        ContentHash = Guid.NewGuid().ToString()
    };
}

@@ -28,6 +28,11 @@ public static class ServiceCollectionExtensions

        // Register repositories
        services.AddScoped<IJobRepository, JobRepository>();
        services.AddScoped<ITriggerRepository, TriggerRepository>();
        services.AddScoped<IWorkerRepository, WorkerRepository>();
        services.AddScoped<IDistributedLockRepository, DistributedLockRepository>();
        services.AddScoped<IJobHistoryRepository, JobHistoryRepository>();
        services.AddScoped<IMetricsRepository, MetricsRepository>();

        return services;
    }
@@ -47,6 +52,11 @@ public static class ServiceCollectionExtensions

        // Register repositories
        services.AddScoped<IJobRepository, JobRepository>();
        services.AddScoped<ITriggerRepository, TriggerRepository>();
        services.AddScoped<IWorkerRepository, WorkerRepository>();
        services.AddScoped<IDistributedLockRepository, DistributedLockRepository>();
        services.AddScoped<IJobHistoryRepository, JobHistoryRepository>();
        services.AddScoped<IMetricsRepository, MetricsRepository>();

        return services;
    }

@@ -0,0 +1,129 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Scheduler.Storage.Postgres.Tests;

[Collection(SchedulerPostgresCollection.Name)]
public sealed class DistributedLockRepositoryTests : IAsyncLifetime
{
    private readonly SchedulerPostgresFixture _fixture;
    private readonly DistributedLockRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public DistributedLockRepositoryTests(SchedulerPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new SchedulerDataSource(Options.Create(options), NullLogger<SchedulerDataSource>.Instance);
        _repository = new DistributedLockRepository(dataSource, NullLogger<DistributedLockRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task TryAcquire_SucceedsOnFirstAttempt()
    {
        // Arrange
        var lockKey = $"test-lock-{Guid.NewGuid()}";

        // Act
        var acquired = await _repository.TryAcquireAsync(_tenantId, lockKey, "worker-1", TimeSpan.FromMinutes(5));

        // Assert
        acquired.Should().BeTrue();
    }

    [Fact]
    public async Task TryAcquire_FailsWhenAlreadyHeld()
    {
        // Arrange
        var lockKey = $"contended-lock-{Guid.NewGuid()}";
        await _repository.TryAcquireAsync(_tenantId, lockKey, "worker-1", TimeSpan.FromMinutes(5));

        // Act
        var secondAcquire = await _repository.TryAcquireAsync(_tenantId, lockKey, "worker-2", TimeSpan.FromMinutes(5));

        // Assert
        secondAcquire.Should().BeFalse();
    }

    [Fact]
    public async Task Release_AllowsReacquisition()
    {
        // Arrange
        var lockKey = $"release-test-{Guid.NewGuid()}";
        await _repository.TryAcquireAsync(_tenantId, lockKey, "worker-1", TimeSpan.FromMinutes(5));

        // Act
        await _repository.ReleaseAsync(lockKey, "worker-1");
        var reacquired = await _repository.TryAcquireAsync(_tenantId, lockKey, "worker-2", TimeSpan.FromMinutes(5));

        // Assert
        reacquired.Should().BeTrue();
    }

    [Fact]
    public async Task Extend_ExtendsLockDuration()
    {
        // Arrange
        var lockKey = $"extend-test-{Guid.NewGuid()}";
        await _repository.TryAcquireAsync(_tenantId, lockKey, "worker-1", TimeSpan.FromMinutes(1));

        // Act
        var extended = await _repository.ExtendAsync(lockKey, "worker-1", TimeSpan.FromMinutes(10));

        // Assert
        extended.Should().BeTrue();
    }

    [Fact]
    public async Task Extend_FailsForDifferentHolder()
    {
        // Arrange
        var lockKey = $"extend-fail-{Guid.NewGuid()}";
        await _repository.TryAcquireAsync(_tenantId, lockKey, "worker-1", TimeSpan.FromMinutes(5));

        // Act
        var extended = await _repository.ExtendAsync(lockKey, "worker-2", TimeSpan.FromMinutes(10));

        // Assert
        extended.Should().BeFalse();
    }

    [Fact]
    public async Task Get_ReturnsLockInfo()
    {
        // Arrange
        var lockKey = $"get-test-{Guid.NewGuid()}";
        await _repository.TryAcquireAsync(_tenantId, lockKey, "worker-1", TimeSpan.FromMinutes(5));

        // Act
        var lockInfo = await _repository.GetAsync(lockKey);

        // Assert
        lockInfo.Should().NotBeNull();
        lockInfo!.HolderId.Should().Be("worker-1");
    }

    [Fact]
    public async Task ListByTenant_ReturnsTenantsLocks()
    {
        // Arrange
        var lockKey1 = $"tenant-lock-1-{Guid.NewGuid()}";
        var lockKey2 = $"tenant-lock-2-{Guid.NewGuid()}";
        await _repository.TryAcquireAsync(_tenantId, lockKey1, "worker-1", TimeSpan.FromMinutes(5));
        await _repository.TryAcquireAsync(_tenantId, lockKey2, "worker-1", TimeSpan.FromMinutes(5));

        // Act
        var locks = await _repository.ListByTenantAsync(_tenantId);

        // Assert
        locks.Should().HaveCount(2);
    }
}

@@ -0,0 +1,198 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Storage.Postgres.Models;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Scheduler.Storage.Postgres.Tests;

[Collection(SchedulerPostgresCollection.Name)]
public sealed class TriggerRepositoryTests : IAsyncLifetime
{
    private readonly SchedulerPostgresFixture _fixture;
    private readonly TriggerRepository _repository;
    private readonly string _tenantId = Guid.NewGuid().ToString();

    public TriggerRepositoryTests(SchedulerPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new SchedulerDataSource(Options.Create(options), NullLogger<SchedulerDataSource>.Instance);
        _repository = new TriggerRepository(dataSource, NullLogger<TriggerRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGet_RoundTripsTrigger()
    {
        // Arrange
        var trigger = new TriggerEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "daily-scan",
            Description = "Daily vulnerability scan",
            JobType = "scan",
            JobPayload = "{\"target\": \"registry.example.com\"}",
            CronExpression = "0 0 * * *",
            Timezone = "UTC",
            Enabled = true,
            NextFireAt = DateTimeOffset.UtcNow.AddDays(1)
        };

        // Act
        await _repository.CreateAsync(trigger);
        var fetched = await _repository.GetByIdAsync(_tenantId, trigger.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(trigger.Id);
        fetched.Name.Should().Be("daily-scan");
        fetched.JobType.Should().Be("scan");
        fetched.CronExpression.Should().Be("0 0 * * *");
    }

    [Fact]
    public async Task GetByName_ReturnsCorrectTrigger()
    {
        // Arrange
        var trigger = CreateTrigger("weekly-report", "0 0 * * 0");
        await _repository.CreateAsync(trigger);

        // Act
        var fetched = await _repository.GetByNameAsync(_tenantId, "weekly-report");

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(trigger.Id);
    }

    [Fact]
    public async Task List_ReturnsAllTriggersForTenant()
    {
        // Arrange
        var trigger1 = CreateTrigger("trigger1", "0 * * * *");
        var trigger2 = CreateTrigger("trigger2", "0 0 * * *");
        await _repository.CreateAsync(trigger1);
        await _repository.CreateAsync(trigger2);

        // Act
        var triggers = await _repository.ListAsync(_tenantId);

        // Assert
        triggers.Should().HaveCount(2);
        triggers.Select(t => t.Name).Should().Contain(["trigger1", "trigger2"]);
    }

    [Fact]
    public async Task GetDueTriggers_ReturnsTriggersReadyToFire()
    {
        // Arrange - One due trigger, one future trigger
        var dueTrigger = CreateTrigger("due", "* * * * *");
        dueTrigger = new TriggerEntity
        {
            Id = dueTrigger.Id,
            TenantId = dueTrigger.TenantId,
            Name = dueTrigger.Name,
            JobType = dueTrigger.JobType,
            CronExpression = dueTrigger.CronExpression,
            NextFireAt = DateTimeOffset.UtcNow.AddMinutes(-1), // Due
            Enabled = true
        };

        var futureTrigger = CreateTrigger("future", "0 0 * * *");
        futureTrigger = new TriggerEntity
        {
            Id = futureTrigger.Id,
            TenantId = futureTrigger.TenantId,
            Name = futureTrigger.Name,
            JobType = futureTrigger.JobType,
            CronExpression = futureTrigger.CronExpression,
            NextFireAt = DateTimeOffset.UtcNow.AddDays(1), // Not due
            Enabled = true
        };

        await _repository.CreateAsync(dueTrigger);
        await _repository.CreateAsync(futureTrigger);

        // Act
        var dueTriggers = await _repository.GetDueTriggersAsync();

        // Assert
        dueTriggers.Should().HaveCount(1);
        dueTriggers[0].Name.Should().Be("due");
    }

    [Fact]
    public async Task RecordFire_UpdatesTriggerState()
    {
        // Arrange
        var trigger = CreateTrigger("fire-test", "* * * * *");
        await _repository.CreateAsync(trigger);
        var jobId = Guid.NewGuid();
        var nextFireAt = DateTimeOffset.UtcNow.AddMinutes(1);

        // Act
        var result = await _repository.RecordFireAsync(_tenantId, trigger.Id, jobId, nextFireAt);
        var fetched = await _repository.GetByIdAsync(_tenantId, trigger.Id);

        // Assert
        result.Should().BeTrue();
        fetched!.LastJobId.Should().Be(jobId);
        fetched.NextFireAt.Should().BeCloseTo(nextFireAt, TimeSpan.FromSeconds(1));
        fetched.FireCount.Should().Be(1);
    }

    [Fact]
    public async Task SetEnabled_TogglesEnableState()
    {
        // Arrange
        var trigger = CreateTrigger("toggle-test", "* * * * *");
        await _repository.CreateAsync(trigger);

        // Act - Disable
        await _repository.SetEnabledAsync(_tenantId, trigger.Id, false);
        var disabled = await _repository.GetByIdAsync(_tenantId, trigger.Id);

        // Assert
        disabled!.Enabled.Should().BeFalse();

        // Act - Re-enable
        await _repository.SetEnabledAsync(_tenantId, trigger.Id, true);
        var enabled = await _repository.GetByIdAsync(_tenantId, trigger.Id);

        // Assert
        enabled!.Enabled.Should().BeTrue();
    }

    [Fact]
    public async Task Delete_RemovesTrigger()
    {
        // Arrange
        var trigger = CreateTrigger("delete-test", "* * * * *");
        await _repository.CreateAsync(trigger);

        // Act
        await _repository.DeleteAsync(_tenantId, trigger.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, trigger.Id);

        // Assert
        fetched.Should().BeNull();
    }

    private TriggerEntity CreateTrigger(string name, string cron) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        Name = name,
        JobType = "test-job",
        CronExpression = cron,
        Enabled = true,
        NextFireAt = DateTimeOffset.UtcNow.AddHours(1)
    };
}

@@ -0,0 +1,155 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Storage.Postgres.Models;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Scheduler.Storage.Postgres.Tests;

[Collection(SchedulerPostgresCollection.Name)]
public sealed class WorkerRepositoryTests : IAsyncLifetime
{
    private readonly SchedulerPostgresFixture _fixture;
    private readonly WorkerRepository _repository;

    public WorkerRepositoryTests(SchedulerPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new SchedulerDataSource(Options.Create(options), NullLogger<SchedulerDataSource>.Instance);
        _repository = new WorkerRepository(dataSource, NullLogger<WorkerRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task UpsertAndGet_RoundTripsWorker()
    {
        // Arrange
        var worker = new WorkerEntity
        {
            Id = $"worker-{Guid.NewGuid()}",
            Hostname = "node-01.cluster.local",
            Status = WorkerStatus.Active,
            JobTypes = ["scan", "sbom"],
            MaxConcurrentJobs = 4
        };

        // Act
        await _repository.UpsertAsync(worker);
        var fetched = await _repository.GetByIdAsync(worker.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(worker.Id);
        fetched.Hostname.Should().Be("node-01.cluster.local");
        fetched.JobTypes.Should().BeEquivalentTo(["scan", "sbom"]);
    }

    [Fact]
    public async Task Heartbeat_UpdatesLastHeartbeat()
    {
        // Arrange
        var worker = CreateWorker();
        await _repository.UpsertAsync(worker);

        // Act
        await Task.Delay(100); // Ensure time difference
        await _repository.HeartbeatAsync(worker.Id, 2);
        var fetched = await _repository.GetByIdAsync(worker.Id);

        // Assert
        fetched!.LastHeartbeatAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
        fetched.CurrentJobs.Should().Be(2);
    }

    [Fact]
    public async Task ListByStatus_ReturnsWorkersWithStatus()
    {
        // Arrange
        var activeWorker = CreateWorker();
        var drainingWorker = new WorkerEntity
        {
            Id = $"draining-{Guid.NewGuid()}",
            Hostname = "node-02",
            Status = WorkerStatus.Draining,
            JobTypes = ["scan"],
            MaxConcurrentJobs = 4
        };
        await _repository.UpsertAsync(activeWorker);
        await _repository.UpsertAsync(drainingWorker);

        // Act
        var activeWorkers = await _repository.ListByStatusAsync(WorkerStatus.Active);

        // Assert
        activeWorkers.Should().HaveCount(1);
        activeWorkers[0].Id.Should().Be(activeWorker.Id);
    }

    [Fact]
    public async Task SetStatus_ChangesWorkerStatus()
    {
        // Arrange
        var worker = CreateWorker();
        await _repository.UpsertAsync(worker);

        // Act
        await _repository.SetStatusAsync(worker.Id, WorkerStatus.Draining);
        var fetched = await _repository.GetByIdAsync(worker.Id);

        // Assert
        fetched!.Status.Should().Be(WorkerStatus.Draining);
    }

    [Fact]
    public async Task Delete_RemovesWorker()
    {
        // Arrange
        var worker = CreateWorker();
        await _repository.UpsertAsync(worker);

        // Act
        await _repository.DeleteAsync(worker.Id);
        var fetched = await _repository.GetByIdAsync(worker.Id);

        // Assert
        fetched.Should().BeNull();
    }

    [Fact]
    public async Task List_ReturnsAllWorkers()
    {
        // Arrange
        var worker1 = CreateWorker();
        var worker2 = new WorkerEntity
        {
            Id = $"worker2-{Guid.NewGuid()}",
            Hostname = "node-02",
            Status = WorkerStatus.Active,
            JobTypes = ["scan"],
            MaxConcurrentJobs = 2
        };
        await _repository.UpsertAsync(worker1);
        await _repository.UpsertAsync(worker2);

        // Act
        var workers = await _repository.ListAsync();

        // Assert
        workers.Should().HaveCount(2);
    }

    private WorkerEntity CreateWorker() => new()
    {
        Id = $"worker-{Guid.NewGuid()}",
        Hostname = "test-host",
        Status = WorkerStatus.Active,
        JobTypes = ["scan"],
        MaxConcurrentJobs = 4
    };
}