Here is a complete, implementation-ready sketch you can drop into your solution and tune.
I assume:
- ASP.NET Core Web API (.NET 10).
- EF Core DbContext with DbSet<PolCensusList>.
- Excel via ClosedXML (clean API, MIT license, built on OpenXML).
1. NuGet packages
Add to the Web/API project:
dotnet add package ClosedXML
# DocumentFormat.OpenXml is pulled in transitively by ClosedXML; add it explicitly only if you call the OpenXML SDK directly
dotnet add package DocumentFormat.OpenXml
2. File repository abstraction
This matches your requirement: upload/download by bucketId + fileId, plus stream variants.
public interface IFileRepository
{
// Uploads a file identified by bucketId + fileId from a Stream
Task UploadAsync(
string bucketId,
string fileId,
Stream content,
string contentType,
CancellationToken cancellationToken = default);
// Uploads a file from an in-memory buffer
Task UploadAsync(
string bucketId,
string fileId,
byte[] content,
string contentType,
CancellationToken cancellationToken = default);
// Downloads a file as a Stream (caller is responsible for disposing)
Task<Stream> DownloadAsStreamAsync(
string bucketId,
string fileId,
CancellationToken cancellationToken = default);
// Downloads a file as a byte[] buffer
Task<byte[]> DownloadAsBytesAsync(
string bucketId,
string fileId,
CancellationToken cancellationToken = default);
}
Example of a simple implementation over some IFileStoreClient (adjust to your FileStore API):
public sealed class FileStoreRepository : IFileRepository
{
private readonly IFileStoreClient _client;
public FileStoreRepository(IFileStoreClient client)
{
_client = client;
}
public async Task UploadAsync(
string bucketId,
string fileId,
Stream content,
string contentType,
CancellationToken cancellationToken = default)
{
// Example – adapt to your real client
await _client.PutObjectAsync(
bucketId: bucketId,
objectId: fileId,
content: content,
contentType: contentType,
cancellationToken: cancellationToken);
}
public async Task UploadAsync(
string bucketId,
string fileId,
byte[] content,
string contentType,
CancellationToken cancellationToken = default)
{
await using var ms = new MemoryStream(content, writable: false);
await UploadAsync(bucketId, fileId, ms, contentType, cancellationToken);
}
public async Task<Stream> DownloadAsStreamAsync(
string bucketId,
string fileId,
CancellationToken cancellationToken = default)
{
// Must return a readable Stream ready for ClosedXML
return await _client.GetObjectStreamAsync(
bucketId: bucketId,
objectId: fileId,
cancellationToken: cancellationToken);
}
public async Task<byte[]> DownloadAsBytesAsync(
string bucketId,
string fileId,
CancellationToken cancellationToken = default)
{
await using var stream = await DownloadAsStreamAsync(bucketId, fileId, cancellationToken);
using var ms = new MemoryStream();
await stream.CopyToAsync(ms, cancellationToken);
return ms.ToArray();
}
}
Register in DI:
builder.Services.AddScoped<IFileRepository, FileStoreRepository>();
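The repository depends on IFileStoreClient, so that client also needs a registration matching your actual FileStore SDK. A minimal sketch, assuming a hypothetical FileStoreClient implementation configured from a "FileStore" connection string:
// Hypothetical client registration – substitute your real FileStore SDK types and configuration
builder.Services.AddSingleton<IFileStoreClient>(_ =>
    new FileStoreClient(builder.Configuration.GetConnectionString("FileStore")));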
3. Import service for PolCensusList from Excel
To keep the controller thin, put Excel parsing + EF into a service.
Assumptions (adjust as needed):
- The file is an .xlsx with a header row.
- Data starts at row 2.
- Columns are:
| Excel column | Cell index | Property |
|---|---|---|
| A | 1 | CustPid |
| B | 2 | Gname |
| C | 3 | Sname |
| D | 4 | Fname |
| E | 5 | BirthDate |
| F | 6 | Gender |
| G | 7 | Bmi |
| H | 8 | Dependant |
| I | 9 | DependantOn |
| J | 10 | MemberAction |
| K | 11 | GrpCode |
| L | 12 | BeginDate |
| M | 13 | SrCustId |
| N | 14 | MemberPolicyId |
| O | 15 | MemberAnnexId |
| P | 16 | ErrMsg |
Other fields (SrPolicyId, SrAnnexId, FileId, Tstamp) are taken from parameters/system.
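For reference, this is the entity shape the import code below assumes for PolCensusList. It is only a sketch; your scaffolded EF model (Oracle column mappings, key generation, the Attr* columns) remains the source of truth:
// Assumed shape only – align names, types and mappings with your scaffolded EF model / Oracle schema
public class PolCensusList
{
    public decimal CensusId { get; set; }            // PK – see the CensusId question at the end
    public decimal SrPolicyId { get; set; }
    public decimal SrAnnexId { get; set; }
    public string CustPid { get; set; } = string.Empty;
    public string Gname { get; set; } = string.Empty;
    public string Sname { get; set; } = string.Empty;
    public string Fname { get; set; } = string.Empty;
    public DateTime BirthDate { get; set; }
    public string Gender { get; set; } = string.Empty;
    public decimal Bmi { get; set; }
    public string Dependant { get; set; } = string.Empty;
    public string DependantOn { get; set; } = string.Empty;
    public string MemberAction { get; set; } = string.Empty;
    public string GrpCode { get; set; } = string.Empty;
    public DateTime? BeginDate { get; set; }
    public decimal? SrCustId { get; set; }
    public decimal? MemberPolicyId { get; set; }
    public decimal? MemberAnnexId { get; set; }
    public string? ErrMsg { get; set; }
    public DateTime Tstamp { get; set; }
    public string FileId { get; set; } = string.Empty;
}
With that shape in mind, the import service looks like this: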
using System.Globalization;
using ClosedXML.Excel;
using Microsoft.EntityFrameworkCore;
public interface IPolCensusImportService
{
Task<int> ImportFromExcelAsync(
string bucketId,
string fileId,
decimal srPolicyId,
decimal srAnnexId,
CancellationToken cancellationToken = default);
}
public sealed class PolCensusImportService : IPolCensusImportService
{
private readonly SerdicaHealthContext _dbContext;
private readonly IFileRepository _fileRepository;
public PolCensusImportService(
SerdicaHealthContext dbContext,
IFileRepository fileRepository)
{
_dbContext = dbContext;
_fileRepository = fileRepository;
}
public async Task<int> ImportFromExcelAsync(
string bucketId,
string fileId,
decimal srPolicyId,
decimal srAnnexId,
CancellationToken cancellationToken = default)
{
await using var stream = await _fileRepository.DownloadAsStreamAsync(bucketId, fileId, cancellationToken);
using var workbook = new XLWorkbook(stream);
var worksheet = workbook.Worksheets.First();
var now = DateTime.UtcNow;
var entities = new List<PolCensusList>();
const int headerRow = 1;
var firstDataRow = headerRow + 1;
for (var row = firstDataRow; ; row++)
{
var rowRange = worksheet.Row(row);
if (rowRange.IsEmpty()) break; // Stop on first fully empty row
// Minimal “empty row” check – no CustPid and no Name => stop
var custPidCell = rowRange.Cell(1);
var gnameCell = rowRange.Cell(2);
var snameCell = rowRange.Cell(3);
if (custPidCell.IsEmpty() && gnameCell.IsEmpty() && snameCell.IsEmpty())
{
break;
}
var entity = new PolCensusList
{
// Non-null FK fields from parameters
SrPolicyId = srPolicyId,
SrAnnexId = srAnnexId,
CustPid = custPidCell.GetString().Trim(),
Gname = gnameCell.GetString().Trim(),
Sname = snameCell.GetString().Trim(),
Fname = rowRange.Cell(4).GetString().Trim(),
BirthDate = GetDate(rowRange.Cell(5)),
Gender = rowRange.Cell(6).GetString().Trim(),
Bmi = GetDecimal(rowRange.Cell(7)),
Dependant = rowRange.Cell(8).GetString().Trim(),
DependantOn = rowRange.Cell(9).GetString().Trim(),
MemberAction = rowRange.Cell(10).GetString().Trim(),
GrpCode = rowRange.Cell(11).GetString().Trim(),
BeginDate = GetNullableDate(rowRange.Cell(12)),
SrCustId = GetNullableDecimal(rowRange.Cell(13)),
MemberPolicyId = GetNullableDecimal(rowRange.Cell(14)),
MemberAnnexId = GetNullableDecimal(rowRange.Cell(15)),
ErrMsg = rowRange.Cell(16).GetString().Trim(),
// Audit / technical fields
Tstamp = now,
FileId = fileId,
// Attr* left null for now – can be mapped later if needed
};
entities.Add(entity);
}
// Note: EF Core already wraps a single SaveChangesAsync in a transaction; the explicit
// transaction below only matters if more database steps are added to this unit of work.
await using var transaction = await _dbContext.Database.BeginTransactionAsync(cancellationToken);
try
{
await _dbContext.PolCensusLists.AddRangeAsync(entities, cancellationToken);
var affected = await _dbContext.SaveChangesAsync(cancellationToken);
await transaction.CommitAsync(cancellationToken);
return affected;
}
catch
{
await transaction.RollbackAsync(cancellationToken);
throw;
}
}
private static DateTime GetDate(IXLCell cell)
{
if (cell.DataType == XLDataType.DateTime &&
cell.GetDateTime() != default)
{
return cell.GetDateTime().Date;
}
var raw = cell.GetString().Trim();
if (string.IsNullOrEmpty(raw))
throw new InvalidOperationException("BirthDate is required but empty.");
// Try a few reasonable formats – extend if needed
var formats = new[]
{
"dd.MM.yyyy",
"dd/MM/yyyy",
"yyyy-MM-dd",
"M/d/yyyy",
};
if (DateTime.TryParseExact(raw, formats,
CultureInfo.InvariantCulture,
DateTimeStyles.AssumeLocal,
out var dt))
{
return dt.Date;
}
if (DateTime.TryParse(raw, CultureInfo.CurrentCulture,
DateTimeStyles.AssumeLocal, out var dt2))
{
return dt2.Date;
}
throw new FormatException($"Cannot parse date value '{raw}'.");
}
private static DateTime? GetNullableDate(IXLCell cell)
{
if (cell.IsEmpty()) return null;
if (cell.DataType == XLDataType.DateTime &&
cell.GetDateTime() != default)
{
return cell.GetDateTime().Date;
}
var raw = cell.GetString().Trim();
if (string.IsNullOrEmpty(raw)) return null;
var formats = new[]
{
"dd.MM.yyyy",
"dd/MM/yyyy",
"yyyy-MM-dd",
"M/d/yyyy",
};
if (DateTime.TryParseExact(raw, formats,
CultureInfo.InvariantCulture,
DateTimeStyles.AssumeLocal,
out var dt))
{
return dt.Date;
}
if (DateTime.TryParse(raw, CultureInfo.CurrentCulture,
DateTimeStyles.AssumeLocal, out var dt2))
{
return dt2.Date;
}
return null;
}
private static decimal GetDecimal(IXLCell cell)
{
if (cell.IsEmpty()) return 0m;
if (cell.DataType == XLDataType.Number)
{
return (decimal)cell.GetDouble();
}
var raw = cell.GetString().Trim();
if (string.IsNullOrEmpty(raw)) return 0m;
// Try invariant and current culture
if (decimal.TryParse(raw, NumberStyles.Any, CultureInfo.InvariantCulture, out var result))
return result;
if (decimal.TryParse(raw, NumberStyles.Any, CultureInfo.CurrentCulture, out result))
return result;
throw new FormatException($"Cannot parse decimal value '{raw}'.");
}
private static decimal? GetNullableDecimal(IXLCell cell)
{
if (cell.IsEmpty()) return null;
if (cell.DataType == XLDataType.Number)
{
return (decimal)cell.GetDouble();
}
var raw = cell.GetString().Trim();
if (string.IsNullOrEmpty(raw)) return null;
if (decimal.TryParse(raw, NumberStyles.Any, CultureInfo.InvariantCulture, out var result))
return result;
if (decimal.TryParse(raw, NumberStyles.Any, CultureInfo.CurrentCulture, out result))
return result;
return null;
}
}
Register the service:
builder.Services.AddScoped<IPolCensusImportService, PolCensusImportService>();
4. API endpoint: import census data by fileId
Controller for IPAL / Serdica health module:
using Microsoft.AspNetCore.Mvc;
[ApiController]
[Route("api/health/census")]
public sealed class PolCensusController : ControllerBase
{
private readonly IPolCensusImportService _importService;
public PolCensusController(IPolCensusImportService importService)
{
_importService = importService;
}
/// <summary>
/// Imports census members from an Excel file stored in FileStore.
/// </summary>
/// <param name="bucketId">Logical bucket / container in FileStore (e.g. "ipal-serdica-census").</param>
/// <param name="fileId">Unique file identifier in FileStore.</param>
/// <param name="srPolicyId">Serdica policy id (FK for PolCensusList).</param>
/// <param name="srAnnexId">Serdica annex id (FK for PolCensusList).</param>
[HttpPost("import/{bucketId}/{fileId}")]
public async Task<IActionResult> ImportFromFile(
string bucketId,
string fileId,
[FromQuery] decimal srPolicyId,
[FromQuery] decimal srAnnexId,
CancellationToken cancellationToken)
{
var importedCount = await _importService.ImportFromExcelAsync(
bucketId,
fileId,
srPolicyId,
srAnnexId,
cancellationToken);
return Ok(new
{
imported = importedCount,
fileId,
bucketId,
srPolicyId,
srAnnexId
});
}
}
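To exercise the import endpoint from a small console client or integration test, a hedged sketch (host, bucket, ids and query values are placeholders; the fileId would come from the upload endpoint below):
// Placeholder values – adjust host, bucket and identifiers to your environment
var bucketId = "ipal-serdica-census";
var fileId = "<fileId returned by the upload endpoint>";

using var http = new HttpClient { BaseAddress = new Uri("https://localhost:5001") };
var response = await http.PostAsync(
    $"api/health/census/import/{bucketId}/{fileId}?srPolicyId=12345&srAnnexId=1",
    content: null);

response.EnsureSuccessStatusCode();
Console.WriteLine(await response.Content.ReadAsStringAsync()); // { "imported": ..., "fileId": ..., ... }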
5. Optional: upload endpoint using IFileRepository
If you want a simple upload entry point compatible with the repository, add an action like the one below to the same controller; note that the controller must then also inject IFileRepository (the _fileRepository field it uses) alongside the import service:
[HttpPost("upload")]
public async Task<IActionResult> UploadCensusFile(
[FromForm] IFormFile file,
[FromQuery] string bucketId,
CancellationToken cancellationToken)
{
if (file == null || file.Length == 0)
return BadRequest("File is required.");
var fileId = Guid.NewGuid().ToString("N");
await using var stream = file.OpenReadStream();
await _fileRepository.UploadAsync(
bucketId,
fileId,
stream,
file.ContentType ?? "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
cancellationToken);
return Ok(new { fileId, bucketId });
}
If you tell me:
- the exact Excel column layout you will get from IPAL / Serdica
- whether CensusId is sequence-generated in Oracle or must be populated in code,
I can tighten the mapping + EF configuration so that it matches your schema 1:1 and is ready for production.