(feature): cache handling utilities

This commit is contained in:
Maksym Sadovnychyy 2025-11-07 22:53:10 +01:00
parent 7d60b77c62
commit 1e2d4156a5
27 changed files with 683 additions and 201 deletions

View File

@ -0,0 +1,11 @@
using System.ComponentModel.DataAnnotations;
namespace MaksIT.LetsEncrypt.Entities;
/// <summary>
/// ACME challenge types supported by the client, with their wire-format names
/// carried in the <see cref="DisplayAttribute"/> (read via a GetDisplayName() extension).
/// NOTE(review): "Chalenge" is a typo for "Challenge", and the members violate
/// PascalCase — but renaming would break existing callers (e.g.
/// ChalengeType.http.GetDisplayName()); confirm all usages before fixing.
/// </summary>
public enum ChalengeType {
// HTTP-01 challenge; wire name "http-01"
[Display(Name = "http-01")]
http,
// DNS-01 challenge; wire name "dns-01"
[Display(Name = "dns-01")]
dns,
}

View File

@ -1,27 +1,16 @@
using System.ComponentModel.DataAnnotations;
namespace MaksIT.LetsEncrypt.Entities
{
public enum ContentType
{
[Display(Name = "application/jose+json")]
JoseJson,
[Display(Name = "application/problem+json")]
ProblemJson,
[Display(Name = "application/pem-certificate-chain")]
PemCertificateChain,
[Display(Name = "application/json")]
Json
}
public static class ContentTypeExtensions
{
public static string GetDisplayName(this ContentType contentType)
{
var type = typeof(ContentType);
var memInfo = type.GetMember(contentType.ToString());
var attributes = memInfo[0].GetCustomAttributes(typeof(DisplayAttribute), false);
return attributes.Length > 0 ? ((DisplayAttribute)attributes[0]).Name : contentType.ToString();
}
}
}
namespace MaksIT.LetsEncrypt.Entities;
/// <summary>
/// MIME content types used in ACME/Let's Encrypt HTTP exchanges.
/// The wire-format media type is carried in the <see cref="DisplayAttribute"/>
/// and resolved through a shared GetDisplayName() extension.
/// NOTE(review): members are order-sensitive (underlying values); do not reorder.
/// </summary>
public enum ContentType
{
// JOSE-signed request bodies
[Display(Name = "application/jose+json")]
JoseJson,
// RFC 7807 problem responses returned on errors
[Display(Name = "application/problem+json")]
ProblemJson,
// PEM certificate chain downloads
[Display(Name = "application/pem-certificate-chain")]
PemCertificateChain,
// Plain JSON payloads
[Display(Name = "application/json")]
Json
}

View File

@ -18,7 +18,7 @@ public class RegistrationCache {
public required string Description { get; set; }
public required string[] Contacts { get; set; }
public required bool IsStaging { get; set; }
public string? ChallengeType { get; set; }
public required string ChallengeType { get; set; }
#endregion

View File

@ -1,27 +1,16 @@
using System.ComponentModel.DataAnnotations;
namespace MaksIT.LetsEncrypt.Entities
{
public enum OrderStatus
{
[Display(Name = "pending")]
Pending,
[Display(Name = "valid")]
Valid,
[Display(Name = "ready")]
Ready,
[Display(Name = "processing")]
Processing
}
public static class OrderStatusExtensions
{
public static string GetDisplayName(this OrderStatus status)
{
var type = typeof(OrderStatus);
var memInfo = type.GetMember(status.ToString());
var attributes = memInfo[0].GetCustomAttributes(typeof(DisplayAttribute), false);
return attributes.Length > 0 ? ((DisplayAttribute)attributes[0]).Name : status.ToString();
}
}
}
namespace MaksIT.LetsEncrypt.Entities;
/// <summary>
/// ACME order lifecycle states, with the protocol's lowercase wire names in
/// <see cref="DisplayAttribute"/> (resolved via a shared GetDisplayName() extension).
/// NOTE(review): members are order-sensitive (underlying values); do not reorder.
/// </summary>
public enum OrderStatus
{
// Order created, authorizations not yet satisfied
[Display(Name = "pending")]
Pending,
// Certificate issued and available
[Display(Name = "valid")]
Valid,
// All authorizations satisfied; finalize may be requested
[Display(Name = "ready")]
Ready,
// Finalize requested; CA is issuing the certificate
[Display(Name = "processing")]
Processing
}

View File

@ -8,7 +8,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MaksIT.Core" Version="1.5.1" />
<PackageReference Include="MaksIT.Core" Version="1.5.2" />
<PackageReference Include="MaksIT.Results" Version="1.1.1" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="9.0.10" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="9.0.10" />

View File

@ -22,6 +22,7 @@ using System.Text;
namespace MaksIT.LetsEncrypt.Services;
public interface ILetsEncryptService {
Task<Result> ConfigureClient(Guid sessionId, bool isStaging);
Task<Result> Init(Guid sessionId,Guid accountId, string description, string[] contacts, RegistrationCache? registrationCache);
@ -216,6 +217,7 @@ public class LetsEncryptService : ILetsEncryptService {
Description = description,
Contacts = contacts,
IsStaging = state.IsStaging,
ChallengeType = ChalengeType.http.GetDisplayName(),
Location = result.Result.Location,
AccountKey = accountKey.ExportCspBlob(true),
Id = result.Result.Id ?? string.Empty,

View File

@ -0,0 +1,23 @@
using MaksIT.Results;
namespace LetsEncryptServer.Abstractions;
/// <summary>
/// Base class for services, providing the standard 400 responses used when
/// validating PATCH requests.
/// </summary>
public abstract class ServiceBase {

  /// <summary>400 result for a PATCH operation the service does not support.</summary>
  protected Result UnsupportedPatchOperationResponse() =>
    Result.BadRequest("Unsupported operation");

  /// <summary>Typed 400 result (default payload) for an unsupported PATCH operation.</summary>
  protected Result<T?> UnsupportedPatchOperationResponse<T>() =>
    Result<T?>.BadRequest(default, "Unsupported operation");

  /// <summary>400 result for a PATCH that targets a field that is not defined.</summary>
  /// <param name="fieldName">Name of the offending field, echoed in the message.</param>
  protected Result PatchFieldIsNotDefined(string fieldName) =>
    Result.BadRequest($"It's not possible to set non defined field {fieldName}.");

  /// <summary>Typed 400 result (default payload) for a PATCH on an undefined field.</summary>
  /// <param name="fieldName">Name of the offending field, echoed in the message.</param>
  protected Result<T?> PatchFieldIsNotDefined<T>(string fieldName) =>
    Result<T?>.BadRequest(default, $"It's not possible to set non defined field {fieldName}.");
}

View File

@ -8,8 +8,8 @@ namespace LetsEncryptServer.Controllers;
public class CacheController(ICacheService cacheService) : ControllerBase {
private readonly ICacheService _cacheService = cacheService;
[HttpGet("caches/download")]
public async Task<IActionResult> GetCaches() {
[HttpGet("cache/download")]
public async Task<IActionResult> GetCache() {
var result = await _cacheService.DownloadCacheZipAsync();
if (!result.IsSuccess || result.Value == null) {
return result.ToActionResult();
@ -17,12 +17,23 @@ public class CacheController(ICacheService cacheService) : ControllerBase {
var bytes = result.Value;
return File(bytes, "application/zip", "caches.zip");
return File(bytes, "application/zip", "cache.zip");
}
[HttpPost("caches/upload")]
public async Task<IActionResult> PostCaches([FromBody] byte[] zipBytes) {
var result = await _cacheService.UploadCacheZipAsync(zipBytes);
[HttpPost("cache/upload")]
//[RequestSizeLimit(200_000_000)]
public async Task<IActionResult> PostCache([FromForm] IFormFile file) {
if (file is null || file.Length == 0) return BadRequest("No file.");
using var ms = new MemoryStream();
await file.CopyToAsync(ms);
var result = await _cacheService.UploadCacheZipAsync(ms.ToArray());
return result.ToActionResult();
}
[HttpDelete("cache")]
public IActionResult DeleteCache() {
var result = _cacheService.DeleteCacheAsync();
return result.ToActionResult();
}
@ -43,4 +54,6 @@ public class CacheController(ICacheService cacheService) : ControllerBase {
var result = await _cacheService.UploadAccountCacheZipAsync(accountId, zipBytes);
return result.ToActionResult();
}
}

View File

@ -1,16 +1,19 @@
using MaksIT.Core.Webapi.Middlewares;
using MaksIT.Core.Logging;
using MaksIT.Core.Webapi.Middlewares;
using MaksIT.LetsEncrypt.Extensions;
using MaksIT.LetsEncrypt.Services;
using MaksIT.LetsEncryptServer;
using MaksIT.LetsEncryptServer.BackgroundServices;
using MaksIT.LetsEncryptServer.Services;
using System.Text.Json.Serialization;
var builder = WebApplication.CreateBuilder(args);
// Extract configuration
var configuration = builder.Configuration;
// Add logging
builder.Logging.AddConsoleLogger();
var configMapPath = Path.Combine(Path.DirectorySeparatorChar.ToString(), "configMap", "appsettings.json");
if (File.Exists(configMapPath)) {
configuration.AddJsonFile(configMapPath, optional: false, reloadOnChange: true);
@ -25,16 +28,16 @@ if (File.Exists(secretsPath)) {
var configurationSection = configuration.GetSection("Configuration");
var appSettings = configurationSection.Get<Configuration>() ?? throw new ArgumentNullException();
// Add logging
builder.Logging.AddConsoleLogger();
// Allow configurations to be available through IOptions<Configuration>
builder.Services.Configure<Configuration>(configurationSection);
// Add services to the container.
builder.Services.AddControllers()
.AddJsonOptions(options => {
options.JsonSerializerOptions.DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull;
});
builder.Services.AddControllers();
// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen();

View File

@ -1,9 +1,13 @@

using LetsEncryptServer.Abstractions;
using MaksIT.Core.Webapi.Models;
using MaksIT.LetsEncrypt.Entities;
using MaksIT.Models;
using MaksIT.Models.LetsEncryptServer.Account.Requests;
using MaksIT.Models.LetsEncryptServer.Account.Responses;
using MaksIT.Results;
using System;
using static System.Collections.Specialized.BitVector32;
namespace MaksIT.LetsEncryptServer.Services;
@ -23,7 +27,7 @@ public interface IAccountRestService {
public interface IAccountService : IAccountInternalService, IAccountRestService { }
public class AccountService : IAccountService {
public class AccountService : ServiceBase, IAccountService {
private readonly ILogger<CacheService> _logger;
private readonly ICacheService _cacheService;
@ -69,8 +73,6 @@ public class AccountService : IAccountService {
public async Task<Result<GetAccountResponse?>> PostAccountAsync(PostAccountRequest requestData) {
// TODO: check for overlapping hostnames in already existing accounts
var fullFlowResult = await _certsFlowService.FullFlow(
requestData.IsStaging,
null,
@ -80,19 +82,17 @@ public class AccountService : IAccountService {
requestData.Hostnames
);
if (!fullFlowResult.IsSuccess || fullFlowResult.Value == null)
return fullFlowResult.ToResultOfType<GetAccountResponse?>(_ => null);
var accountId = fullFlowResult.Value.Value;
var loadAccauntFromCacheResult = await _cacheService.LoadAccountFromCacheAsync(accountId);
if (!loadAccauntFromCacheResult.IsSuccess || loadAccauntFromCacheResult.Value == null) {
return loadAccauntFromCacheResult.ToResultOfType<GetAccountResponse?>(_ => null);
var loadAccountFromCacheResult = await _cacheService.LoadAccountFromCacheAsync(accountId);
if (!loadAccountFromCacheResult.IsSuccess || loadAccountFromCacheResult.Value == null) {
return loadAccountFromCacheResult.ToResultOfType<GetAccountResponse?>(_ => null);
}
var cache = loadAccauntFromCacheResult.Value;
var cache = loadAccountFromCacheResult.Value;
return Result<GetAccountResponse?>.Ok(CreateGetAccountResponse(accountId, cache));
}
@ -105,76 +105,60 @@ public class AccountService : IAccountService {
var cache = loadAccountResult.Value;
if (requestData.Description != null) {
switch (requestData.Description.Op) {
case PatchOperation.Replace:
cache.Description = requestData.Description.Value;
if (requestData.TryGetOperation(nameof(requestData.Description), out var patchOperation)) {
switch (patchOperation) {
case PatchOperation.SetField:
if (requestData.Description == null)
return PatchFieldIsNotDefined<GetAccountResponse?>(nameof(requestData.Description));
cache.Description = requestData.Description;
break;
default:
return UnsupportedPatchOperationResponse<GetAccountResponse?>();
}
}
if (requestData.IsDisabled != null) {
switch (requestData.IsDisabled.Op) {
case PatchOperation.Replace:
if (requestData.TryGetOperation(nameof(requestData.IsDisabled), out patchOperation)) {
switch (patchOperation) {
case PatchOperation.SetField:
if (requestData.IsDisabled == null)
return PatchFieldIsNotDefined<GetAccountResponse?>(nameof(requestData.IsDisabled));
cache.IsDisabled = requestData.IsDisabled.Value;
break;
default:
return UnsupportedPatchOperationResponse<GetAccountResponse?>();
}
}
if (requestData.Contacts?.Any() == true) {
var contacts = cache.Contacts?.ToList() ?? new List<string>();
foreach (var action in requestData.Contacts) {
switch (action.Op)
{
case PatchOperation.Add:
if (action.Value != null) contacts.Add(action.Value);
break;
case PatchOperation.Replace:
if (action.Index != null && action.Index >= 0 && action.Index < contacts.Count)
contacts[action.Index.Value] = action.Value;
break;
case PatchOperation.Remove:
if (action.Index != null && action.Index >= 0 && action.Index < contacts.Count)
contacts.RemoveAt(action.Index.Value);
break;
}
if (requestData.TryGetOperation(nameof(requestData.Contacts), out patchOperation)) {
switch (patchOperation) {
case PatchOperation.SetField:
if (requestData.Contacts == null)
return PatchFieldIsNotDefined<GetAccountResponse?>(nameof(requestData.Contacts));
cache.Contacts = requestData.Contacts.ToArray();
break;
}
cache.Contacts = contacts.ToArray();
}
#region Patch Hostnames
var hostnamesToAdd = new List<string>();
var hostnamesToRemove = new List<string>();
if (requestData.Hostnames?.Any() == true) {
var hostnames = cache.GetHosts().ToList();
foreach (var action in requestData.Hostnames) {
foreach (var hostnameRequestData in requestData.Hostnames ?? []) {
if (hostnameRequestData.TryGetOperation("collectionItemOperation", out patchOperation)) {
if (action.Hostname != null) {
switch (action.Hostname.Op) {
case PatchOperation.Add:
hostnamesToAdd.Add(action.Hostname.Value);
if (hostnameRequestData.Hostname == null)
return PatchFieldIsNotDefined<GetAccountResponse?>(nameof(hostnameRequestData.Hostname));
break;
switch (patchOperation) {
case PatchOperation.AddToCollection:
hostnamesToAdd.Add(hostnameRequestData.Hostname);
break;
case PatchOperation.Replace:
if (action.Hostname.Index != null && action.Hostname.Index >= 0 && action.Hostname.Index < hostnames.Count)
hostnames[action.Hostname.Index.Value].Hostname = action.Hostname.Value;
break;
case PatchOperation.Remove:
hostnamesToRemove.Add(action.Hostname.Value);
break;
}
}
if (action.IsDisabled != null) {
switch (action.IsDisabled.Op) {
case PatchOperation.Replace:
break;
}
case PatchOperation.RemoveFromCollection:
hostnamesToRemove.Add(hostnameRequestData.Hostname);
break;
}
}
}
@ -210,6 +194,7 @@ public class AccountService : IAccountService {
if (!revokeResult.IsSuccess)
return revokeResult.ToResultOfType<GetAccountResponse?>(default);
}
#endregion
loadAccountResult = await _cacheService.LoadAccountFromCacheAsync(accountId);
if (!loadAccountResult.IsSuccess || loadAccountResult.Value == null) {

View File

@ -19,7 +19,7 @@ public interface ICacheService {
Task<Result<byte[]?>> DownloadAccountCacheZipAsync(Guid accountId);
Task<Result> UploadCacheZipAsync(byte[] zipBytes);
Task<Result> UploadAccountCacheZipAsync(Guid accountId, byte[] zipBytes);
Task<Result> ClearCacheAsync();
Result DeleteCacheAsync();
}
public class CacheService : ICacheService, IDisposable {
@ -222,11 +222,18 @@ public class CacheService : ICacheService, IDisposable {
}
}
public async Task<Result> ClearCacheAsync() {
public Result DeleteCacheAsync() {
try {
if (Directory.Exists(_cacheDirectory)) {
Directory.Delete(_cacheDirectory, true);
_logger.LogInformation("Cache directory cleared.");
// Delete all files
foreach (var file in Directory.GetFiles(_cacheDirectory)) {
File.Delete(file);
}
// Delete all subdirectories
foreach (var dir in Directory.GetDirectories(_cacheDirectory)) {
Directory.Delete(dir, true);
}
_logger.LogInformation("Cache directory contents cleared.");
}
else {
_logger.LogWarning("Cache directory not found to clear.");
@ -234,7 +241,7 @@ public class CacheService : ICacheService, IDisposable {
return Result.Ok();
}
catch (Exception ex) {
var message = "Error clearing cache directory.";
var message = "Error clearing cache directory contents.";
_logger.LogError(ex, message);
return Result.InternalServerError([message, .. ex.ExtractMessages()]);
}

View File

@ -1,3 +1,4 @@
VITE_APP_TITLE=MaksIT.CertsUI
VITE_COMPANY=MaksIT
VITE_COMPANY_URL=https://maks-it.com
VITE_API_URL=http://localhost:8080/api

View File

@ -11,6 +11,7 @@
"@reduxjs/toolkit": "^2.8.2",
"@tanstack/react-table": "^8.21.3",
"axios": "^1.11.0",
"client-zip": "^2.5.0",
"date-fns": "^4.1.0",
"jwt-decode": "^4.0.0",
"lodash": "^4.17.21",
@ -2632,6 +2633,12 @@
"node": ">=18"
}
},
"node_modules/client-zip": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/client-zip/-/client-zip-2.5.0.tgz",
"integrity": "sha512-ydG4nDZesbFurnNq0VVCp/yyomIBh+X/1fZPI/P24zbnG4dtC4tQAfI5uQsomigsUMeiRO2wiTPizLWQh+IAyQ==",
"license": "MIT"
},
"node_modules/clsx": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz",

View File

@ -13,6 +13,7 @@
"@reduxjs/toolkit": "^2.8.2",
"@tanstack/react-table": "^8.21.3",
"axios": "^1.11.0",
"client-zip": "^2.5.0",
"date-fns": "^4.1.0",
"jwt-decode": "^4.0.0",
"lodash": "^4.17.21",

View File

@ -45,7 +45,7 @@ const LayoutWrapper: FC<LayoutWrapperProps> = (props) => {
}
footer={
{
children: <p>&copy; {new Date().getFullYear()} {import.meta.env.VITE_COMPANY}</p>
children: <p>&copy; {new Date().getFullYear()} <a href={import.meta.env.VITE_COMPANY_URL}>{import.meta.env.VITE_COMPANY}</a></p>
}
}
>{children}</Layout>
@ -147,7 +147,9 @@ const AppMap: AppMapType[] = [
enum ApiRoutes {
ACCOUNTS = 'GET|/accounts',
// Accounts
ACCOUNTS_GET = 'GET|/accounts',
ACCOUNT_POST = 'POST|/account',
ACCOUNT_GET = 'GET|/account/{accountId}',
@ -160,14 +162,23 @@ enum ApiRoutes {
// ACCOUNT_ID_HOSTNAMES = 'GET|/account/{accountId}/hostnames',
// ACCOUNT_ID_HOSTNAME_ID = 'GET|/account/{accountId}/hostname/{index}',
// Agents
AGENT_TEST = 'GET|/agent/test',
// Certs flow
CERTS_FLOW_CONFIGURE_CLIENT = 'POST|/certs/configure-client',
CERTS_FLOW_TERMS_OF_SERVICE = 'GET|/certs/{sessionId}/terms-of-service',
CERTS_FLOW_CERTIFICATES_APPLY = 'POST|/certs/{accountId}/certificates/apply',
// Caches
FULL_CACHE_DOWNLOAD_GET = 'GET|/cache/download',
FULL_CACHE_UPLOAD_POST = 'POST|/cache/upload',
FULL_CACHE_DELETE = 'DELETE|/cache',
CACHE_DOWNLOAD_GET = 'GET|/cache/{accountId}/download/',
CACHE_UPLOAD_POST = 'POST|/cache/{accountId}/upload/',
// Agents
AGENT_TEST = 'GET|/agent/test',
// Secrets
generateSecret = 'GET|/secret/generatesecret',

View File

@ -5,10 +5,8 @@ import { store } from './redux/store'
import { refreshJwt } from './redux/slices/identitySlice'
import { hideLoader, showLoader } from './redux/slices/loaderSlice'
import { addToast } from './components/Toast/addToast'
import { de } from 'zod/v4/locales'
import { deepPatternMatch } from './functions'
import { ProblemDetails, ProblemDetailsProto } from './models/ProblemDetails'
import { add } from 'lodash'
import { ProblemDetails } from './models/ProblemDetails'
// Create an Axios instance
const axiosInstance = axios.create({
@ -99,6 +97,7 @@ axiosInstance.interceptors.response.use(
* Performs a GET request and returns the response data.
* @param url The endpoint URL.
* @param timeout Optional timeout in milliseconds to override the default.
* @returns The response data, or undefined if an error occurs.
*/
const getData = async <TResponse>(url: string, timeout?: number): Promise<TResponse | undefined> => {
try {
@ -120,6 +119,7 @@ const getData = async <TResponse>(url: string, timeout?: number): Promise<TRespo
* @param url The endpoint URL.
* @param data The request payload.
* @param timeout Optional timeout in milliseconds to override the default.
* @returns The response data, or undefined if an error occurs.
*/
const postData = async <TRequest, TResponse>(url: string, data?: TRequest, timeout?: number): Promise<TResponse | undefined> => {
try {
@ -142,6 +142,7 @@ const postData = async <TRequest, TResponse>(url: string, data?: TRequest, timeo
* @param url The endpoint URL.
* @param data The request payload.
* @param timeout Optional timeout in milliseconds to override the default.
* @returns The response data, or undefined if an error occurs.
*/
const patchData = async <TRequest, TResponse>(url: string, data: TRequest, timeout?: number): Promise<TResponse | undefined> => {
try {
@ -163,6 +164,7 @@ const patchData = async <TRequest, TResponse>(url: string, data: TRequest, timeo
* @param url The endpoint URL.
* @param data The request payload.
* @param timeout Optional timeout in milliseconds to override the default.
* @returns The response data, or undefined if an error occurs.
*/
const putData = async <TRequest, TResponse>(url: string, data: TRequest, timeout?: number): Promise<TResponse | undefined> => {
try {
@ -183,6 +185,7 @@ const putData = async <TRequest, TResponse>(url: string, data: TRequest, timeout
* Performs a DELETE request and returns the response data.
* @param url The endpoint URL.
* @param timeout Optional timeout in milliseconds to override the default.
* @returns The response data, or undefined if an error occurs.
*/
const deleteData = async <TResponse>(url: string, timeout?: number): Promise<TResponse | undefined> => {
try {
@ -199,11 +202,141 @@ const deleteData = async <TResponse>(url: string, timeout?: number): Promise<TRe
}
}
/**
 * POSTs a raw binary payload (e.g., a file upload) as application/octet-stream.
 * @param url The endpoint URL.
 * @param data The binary body to send.
 * @param timeout Optional timeout in milliseconds overriding the instance default.
 * @returns The parsed response data, or undefined when the request fails.
 */
const postBinary = async <TResponse>(
  url: string,
  data: Blob | ArrayBuffer | Uint8Array,
  timeout?: number
): Promise<TResponse | undefined> => {
  try {
    // Build the config up front; only attach a timeout when one was supplied
    const response = await axiosInstance.post<TResponse>(
      url,
      data,
      timeout
        ? { headers: { 'Content-Type': 'application/octet-stream' }, timeout }
        : { headers: { 'Content-Type': 'application/octet-stream' } }
    )
    return response.data
  } catch {
    // Failures are surfaced by the axios interceptors; callers just get undefined
    return undefined
  }
}
/**
 * GETs binary content (e.g., a file download) from the given endpoint.
 * @param url The endpoint URL.
 * @param timeout Optional timeout in milliseconds overriding the instance default.
 * @param as Response representation to request: 'arraybuffer' (default) or 'blob'.
 * @returns The binary payload together with the response headers, or undefined on failure.
 */
const getBinary = async (
  url: string,
  timeout?: number,
  as: 'arraybuffer' | 'blob' = 'arraybuffer'
): Promise<{ data: ArrayBuffer | Blob, headers: Record<string, string> } | undefined> => {
  try {
    const requestConfig = timeout
      ? { responseType: as, timeout }
      : { responseType: as }
    const { data, headers } = await axiosInstance.get(url, requestConfig)
    return { data, headers: headers as Record<string, string> }
  } catch {
    // Failures are surfaced by the axios interceptors; callers just get undefined
    return undefined
  }
}
/**
 * POSTs multipart/form-data built either from a ready FormData instance or
 * from a plain record of fields.
 * Note: Content-Type is intentionally NOT set here; the browser adds the
 * multipart boundary automatically.
 * @param url The endpoint URL.
 * @param form A FormData instance, or a record whose values may be
 *             string | Blob | File | (string | Blob | File)[]
 * @param timeout Optional timeout in milliseconds overriding the instance default.
 * @returns The parsed response data, or undefined when the request fails.
 */
const postFormData = async <TResponse>(
  url: string,
  form: FormData | Record<string, string | Blob | File | (string | Blob | File)[]>,
  timeout?: number
): Promise<TResponse | undefined> => {
  try {
    // Normalize the input into a FormData instance (kept inside try so any
    // conversion error also resolves to undefined, like a failed request)
    let formData: FormData
    if (form instanceof FormData) {
      formData = form
    } else {
      formData = new FormData()
      for (const [key, value] of Object.entries(form)) {
        if (Array.isArray(value)) {
          for (const item of value) formData.append(key, item)
        } else {
          formData.append(key, value)
        }
      }
    }

    const response = await axiosInstance.post<TResponse>(url, formData, {
      // Do NOT set Content-Type; the browser will set the correct multipart boundary
      ...(timeout ? { timeout } : {})
    })
    return response.data
  } catch {
    // Failures are surfaced by the axios interceptors; callers just get undefined
    return undefined
  }
}
/**
 * Convenience wrapper around postFormData for uploading a single file.
 * @param url The endpoint URL.
 * @param file The file/blob to upload.
 * @param fieldName Form field name for the file (default: "file").
 * @param filename Optional filename; when omitted and "file" is a File, File.name is used.
 * @param extraFields Optional additional string fields to append to the form.
 * @param timeout Optional timeout in milliseconds overriding the instance default.
 * @returns The parsed response data, or undefined when the request fails.
 */
const postFile = async <TResponse>(
  url: string,
  file: Blob | File,
  fieldName: string = 'file',
  filename?: string,
  extraFields?: Record<string, string>,
  timeout?: number
): Promise<TResponse | undefined> => {
  const resolvedName = filename ?? (file instanceof File ? file.name : 'file')
  const form = new FormData()
  form.append(fieldName, file, resolvedName)
  for (const [key, value] of Object.entries(extraFields ?? {})) {
    form.append(key, value)
  }
  return postFormData<TResponse>(url, form, timeout)
}
export {
axiosInstance,
getData,
postData,
patchData,
putData,
deleteData
deleteData,
postBinary,
getBinary,
postFormData,
postFile
}

View File

@ -24,8 +24,8 @@ const EditAccountHostnameFormProto = (): EditAccountHostnameFormProps => ({
})
const EditAccountHostnameFormSchema: Schema<EditAccountHostnameFormProps> = object({
hostname: string(),
isDisabled: boolean()
isDisabled: boolean(),
hostname: string()
})
interface EditAccountFormProps {
@ -95,7 +95,7 @@ const EditAccount: FC<EditAccountProps> = (props) => {
...RegisterFormProto(),
isDisabled: response.isDisabled,
description: response.description,
contacts: response.contacts,
contacts: [...response.contacts],
hostnames: (response.hostnames ?? []).map(h => ({
...EditAccountHostnameFormProto(),
isDisabled: h.isDisabled,
@ -124,9 +124,10 @@ const EditAccount: FC<EditAccountProps> = (props) => {
const patchRequest: PatchAccountRequest = {
isDisabled: formStateCopy.isDisabled,
description: formStateCopy.description,
contacts: formStateCopy.contacts,
contacts: [...formStateCopy.contacts],
hostnames: formStateCopy.hostnames.map(h => ({
hostname: h.hostname
hostname: h.hostname,
isDisabled: h.isDisabled
}))
}
@ -139,7 +140,11 @@ const EditAccount: FC<EditAccountProps> = (props) => {
const fromFormState = mapFormStateToPatchRequest(formState)
const fromBackupState = mapFormStateToPatchRequest(backupState)
const delta = deepDelta(fromBackupState, fromFormState)
const delta = deepDelta(fromFormState, fromBackupState, {
arrays: {
hostnames: { identityKey: 'hostname' }
}
})
if (!deltaHasOperations(delta)) {
addToast('No changes detected', 'info')
@ -147,6 +152,7 @@ const EditAccount: FC<EditAccountProps> = (props) => {
}
const request = PatchAccountRequestSchema.safeParse(delta)
if (!request.success) {
request.error.issues.forEach(error => {
addToast(error.message, 'error')
@ -156,7 +162,7 @@ const EditAccount: FC<EditAccountProps> = (props) => {
}
patchData<PatchAccountRequest, GetAccountResponse>(GetApiRoute(ApiRoutes.ACCOUNT_PATCH).route
.replace('{accountId}', accountId), delta
.replace('{accountId}', accountId), delta, 120000
).then((response) => {
if (!response) return
@ -215,9 +221,6 @@ const EditAccount: FC<EditAccountProps> = (props) => {
label={'New Contact'}
value={formState.contact}
onChange={(e) => {
if (formState.contacts.includes(e.target.value))
return
handleInputChange('contact', e.target.value)
}}
placeholder={'Add contact'}
@ -227,6 +230,9 @@ const EditAccount: FC<EditAccountProps> = (props) => {
<FieldContainer colspan={2}>
<ButtonComponent
onClick={() => {
if (formState.contacts.includes(formState.contact))
return
handleInputChange('contacts', [...formState.contacts, formState.contact])
handleInputChange('contact', '')
}}
@ -238,12 +244,31 @@ const EditAccount: FC<EditAccountProps> = (props) => {
<h3 className={'col-span-12'}>Hostnames:</h3>
<ul className={'col-span-12'}>
{formState.hostnames.map((hostname) => (
<li key={hostname.hostname} className={'grid grid-cols-12 gap-4 w-full'}>
<span className={'col-span-10'}>{hostname.hostname}</span>
<li key={hostname.hostname} className={'grid grid-cols-12 gap-4 w-full pb-2'}>
<span className={'col-span-7'}>{hostname.hostname}</span>
<span className={'col-span-3'}>
<label className={'mr-2'}>Disabled:</label>
<input
type={'checkbox'}
checked={hostname.isDisabled}
onChange={(e) => {
const updatedHostnames = formState.hostnames.map(h => {
if (h.hostname === hostname.hostname) {
return {
...h,
isDisabled: e.target.checked
}
}
return h
})
handleInputChange('hostnames', updatedHostnames)
}}
/>
</span>
<ButtonComponent
colspan={2}
onClick={() => {
const updatedHostnames = formState.hostnames.filter(h => h !== hostname)
const updatedHostnames = formState.hostnames.filter(h => h.hostname !== hostname.hostname)
handleInputChange('hostnames', updatedHostnames)
}}
>
@ -258,9 +283,6 @@ const EditAccount: FC<EditAccountProps> = (props) => {
label={'New Hostname'}
value={formState.hostname}
onChange={(e) => {
if (formState.hostnames.find(h => h.hostname === e.target.value))
return
handleInputChange('hostname', e.target.value)
}}
placeholder={'Add hostname'}
@ -270,7 +292,15 @@ const EditAccount: FC<EditAccountProps> = (props) => {
<FieldContainer colspan={2}>
<ButtonComponent
onClick={() => {
handleInputChange('hostnames', [...formState.hostnames, formState.hostname])
if (formState.hostnames.find(h => h.hostname === formState.hostname))
return
handleInputChange('hostnames', [ ...formState.hostnames, {
...EditAccountHostnameFormProto(),
hostname: formState.hostname
}
])
handleInputChange('hostname', '')
}}
disabled={formState.hostname.trim() === ''}

View File

@ -16,7 +16,7 @@ const Home: FC = () => {
const [accountId, setAccountId] = useState<string | undefined>(undefined)
const loadData = useCallback(() => {
getData<GetAccountResponse[]>(GetApiRoute(ApiRoutes.ACCOUNTS).route).then((response) => {
getData<GetAccountResponse[]>(GetApiRoute(ApiRoutes.ACCOUNTS_GET).route).then((response) => {
if (!response) return
setRawd(response)
})

View File

@ -1,9 +1,12 @@
import { FC, useState } from 'react'
import { FormContainer, FormContent, FormFooter, FormHeader } from '../components/FormLayout'
import { ButtonComponent, DateTimePickerComponent, FileUploadComponent } from '../components/editors'
import { ButtonComponent, FileUploadComponent } from '../components/editors'
import { ApiRoutes, GetApiRoute } from '../AppMap'
import { getData } from '../axiosConfig'
import { deleteData, getBinary, getData, postFile } from '../axiosConfig'
import { addToast } from '../components/Toast/addToast'
import { extractFilenameFromHeaders, saveBinaryToDisk } from '../functions'
import { downloadZip } from 'client-zip'
const Utilities: FC = () => {
@ -18,6 +21,40 @@ const Utilities: FC = () => {
})
}
const handleUploadFiles = async () => {
if (files.length === 0) {
addToast('No files selected for upload', 'error')
return
}
const zipBlob = await downloadZip(files).blob()
// Option A: direct file helper
postFile(GetApiRoute(ApiRoutes.FULL_CACHE_UPLOAD_POST).route, zipBlob, 'file', 'cache.zip')
.then((_) => {
setFiles([])
addToast('Files uploaded successfully', 'success')
})
}
const handleDownloadFiles = () => {
getBinary(GetApiRoute(ApiRoutes.FULL_CACHE_DOWNLOAD_GET).route
).then((response) => {
if (!response) return
const { data, headers } = response
const filename = extractFilenameFromHeaders(headers, 'cache.zip')
saveBinaryToDisk(data, filename)
})
}
const handleDestroyFiles = () => {
deleteData(GetApiRoute(ApiRoutes.FULL_CACHE_DELETE).route)
.then((_) => {
addToast('Cache files destroyed successfully', 'success')
})
}
return <FormContainer>
<FormHeader>Utilities</FormHeader>
<FormContent>
@ -29,25 +66,37 @@ const Utilities: FC = () => {
onClick={hadnleTestAgent}
/>
<span className={'col-span-9'}></span>
<FileUploadComponent
colspan={6}
label={'Upload cache files'}
label={'Select cache files'}
multiple={true}
onChange={setFiles}
/>
<ButtonComponent
colspan={3}
children={'Upload cache files'}
buttonHierarchy={'primary'}
onClick={handleUploadFiles}
/>
<span className={'col-span-3'}></span>
<ButtonComponent
colspan={3}
children={'Download cache files'}
buttonHierarchy={'secondary'}
onClick={() => {}}
onClick={handleDownloadFiles}
/>
<ButtonComponent
colspan={3}
children={'Destroy cache files'}
buttonHierarchy={'error'}
onClick={() => {}}
onClick={handleDestroyFiles}
/>
</div>
</FormContent>

View File

@ -1,4 +1,4 @@
import { PatchOperation } from '../../models/PatchOperation'
import { PatchOperation } from '../../models/PatchOperation.js'
import { deepCopy } from './deepCopy.js'
import { deepEqual } from './deepEqual.js'
@ -18,37 +18,71 @@ type PlainObject = Record<string, unknown>
type DeltaArrayItem<T extends Identifiable> = Partial<T> & EnsureId<T> & OperationBag
/** Non-generic policy: keys are always strings */
/**
* Policy that controls how object arrays behave.
*
* - Arrays with identifiable items (id or identityKey) get per-item Add/Remove/Update logic.
* - Arrays without identity fall back to "full replace" semantics.
*/
export type ArrayPolicy = {
/** Nome del campo “radice” che implica re-parenting (es. 'organizationId') */
/** Name of the "root" field that implies re-parenting (e.g. 'organizationId') */
rootKey?: string
/** Nomi degli array figli da trattare in caso di re-parenting (es. ['applicationRoles']) */
/** Child array field names to process on re-parenting (e.g. ['applicationRoles']) */
childArrayKeys?: string[]
/** Se true, in re-parenting i figli vengono azzerati (default TRUE) */
/** If true, children are cleared on root change (default TRUE) */
dropChildrenOnRootChange?: boolean
/** Nome del campo ruolo (default 'role') */
/** Name of the role field (default 'role') */
roleFieldKey?: string
/** Se true, quando role diventa null si rimuove lintero item (default TRUE) */
/** If true, when role becomes null the entire item is removed (default TRUE) */
deleteItemWhenRoleRemoved?: boolean
/**
* Stable identity for items that do not have an `id`.
* Can be:
* - a property name (e.g. "hostname")
* - a function that extracts a unique value
*
* Without identityKey AND without item.id, the array falls back to full replace.
*/
identityKey?: string | ((item: Record<string, unknown>) => string | number)
}
export type DeepDeltaOptions<T> = {
/** Policy per i campi array del payload (mappati per nome chiave) */
/**
* Optional per-array rules.
* Example:
* {
* hostnames: { identityKey: "hostname" }
* }
*/
arrays?: Partial<Record<Extract<keyof T, string>, ArrayPolicy>>
}
/**
* Delta<T> represents:
* - T fields that changed (primitives, objects, arrays)
* - "operations" dictionary describing what type of change (SetField, RemoveField, AddToCollection, etc.)
* - For primitive arrays: delta contains the full new array + SetField.
* - For identifiable object arrays: delta contains per-item changes.
*/
export type Delta<T> =
Partial<{
[K in keyof T]:
T[K] extends (infer U)[]
? DeltaArrayItem<(U & Identifiable)>[]
? (U extends object
? DeltaArrayItem<(U & Identifiable)>[] // object arrays → itemized
: U[]) // primitive arrays → full array
: T[K] extends object
? Delta<T[K] & OperationBag<Extract<keyof T, string>>>
: T[K]
}> & OperationBag<Extract<keyof T, string>>
/** Safe index per evitare TS2536 quando si indicizza su chiavi dinamiche */
const getArrayPolicy = <T>(options: DeepDeltaOptions<T> | undefined, key: string): ArrayPolicy | undefined =>{
/** Safe index to avoid TS2536 when addressing dynamic keys */
const getArrayPolicy = <T>(options: DeepDeltaOptions<T> | undefined, key: string): ArrayPolicy | undefined => {
const arrays = options?.arrays as Partial<Record<string, ArrayPolicy>> | undefined
return arrays?.[key]
}
@ -56,6 +90,16 @@ const getArrayPolicy = <T>(options: DeepDeltaOptions<T> | undefined, key: string
const isPlainObject = (value: unknown): value is PlainObject =>
typeof value === 'object' && value !== null && !Array.isArray(value)
/**
* Computes a deep "delta" object between formState and backupState.
*
* Rules:
* - Primitive fields SetField / RemoveField
* - Primitive arrays full replace (SetField)
* - Object arrays:
* * if items have id or identityKey itemized collection diff
* * otherwise full replace (SetField)
*/
export const deepDelta = <T extends Record<string, unknown>>(
formState: T,
backupState: T,
@ -63,11 +107,20 @@ export const deepDelta = <T extends Record<string, unknown>>(
): Delta<T> => {
const delta = {} as Delta<T>
// Sets an operation flag into the provided bag for a given key
const setOp = (bag: OperationBag, key: string, op: PatchOperation) => {
const ops = (bag.operations ??= {} as Record<string, PatchOperation>)
ops[key] = op
}
/**
* Recursive object diffing.
*
* Handles:
* - primitives
* - nested objects
* - arrays (delegates to array logic)
*/
const calculateDelta = (
form: PlainObject,
backup: PlainObject,
@ -82,18 +135,59 @@ export const deepDelta = <T extends Record<string, unknown>>(
// --- ARRAY ---
if (Array.isArray(formValue) && Array.isArray(backupValue)) {
const bothPrimitive =
(formValue as unknown[]).every(v => typeof v !== 'object' || v === null) &&
(backupValue as unknown[]).every(v => typeof v !== 'object' || v === null)
/**
* Detect primitive arrays (string[], number[], primitive unions).
* Primitive arrays have no identity always full replace.
*/
if (bothPrimitive) {
if (!deepEqual(formValue, backupValue)) {
;(parentDelta as Delta<T>)[key] = deepCopy(formValue) as unknown as Delta<T>[typeof key]
setOp(parentDelta, key, PatchOperation.SetField)
}
continue
}
// Object collections
const policy = getArrayPolicy(options, key)
/**
* If items have neither `id` nor `identityKey`, they cannot be diffed.
* => treat array as a scalar and replace entirely.
*/
const lacksIdentity =
!(policy?.identityKey) &&
(formValue as Identifiable[]).every(x => (x?.id ?? null) == null) &&
(backupValue as Identifiable[]).every(x => (x?.id ?? null) == null)
if (lacksIdentity) {
if (!deepEqual(formValue, backupValue)) {
;(parentDelta as Delta<T>)[key] = deepCopy(formValue) as unknown as Delta<T>[typeof key]
setOp(parentDelta, key, PatchOperation.SetField)
}
continue
}
/**
* Identifiable arrays => itemized delta with Add/Remove/Update
*/
const arrayDelta = calculateArrayDelta(
formValue as Identifiable[],
backupValue as Identifiable[],
policy
)
if (arrayDelta.length > 0) {
;(parentDelta as Delta<T>)[key] = arrayDelta as unknown as Delta<T>[typeof key]
}
continue
}
// --- OBJECT ---
if (isPlainObject(formValue) && isPlainObject(backupValue)) {
if (!deepEqual(formValue, backupValue)) {
@ -118,6 +212,16 @@ export const deepDelta = <T extends Record<string, unknown>>(
}
}
/**
* Computes itemized delta for identifiable object arrays.
*
* Handles:
* - Add: item without id or identity
* - Remove: item missing in formArray
* - Update: fields changed inside item
* - Re-parenting: rootKey changed
* - Role: if policy.deleteItemWhenRoleRemoved is true
*/
const calculateArrayDelta = <U extends Identifiable>(
formArray: U[],
backupArray: U[],
@ -125,7 +229,28 @@ export const deepDelta = <T extends Record<string, unknown>>(
): DeltaArrayItem<U>[] => {
const arrayDelta: DeltaArrayItem<U>[] = []
const getId = (item?: U): IdLike => (item ? item.id ?? null : null)
/**
* Identity resolution order:
* 1. If item has `.id` use it.
* 2. Else if identityKey is provided use that to extract a unique key.
* 3. Else: return null item will be treated as new.
*/
const resolveId = (item?: U): IdLike => {
if (!item) return null
const directId = (item as Identifiable).id
if (directId !== null && directId !== undefined) return directId
if (!policy?.identityKey) return null
if (typeof policy.identityKey === 'function') {
try { return policy.identityKey(item as unknown as Record<string, unknown>) }
catch { return null }
}
const k = policy.identityKey as string
const v = (item as unknown as Record<string, unknown>)[k]
return (typeof v === 'string' || typeof v === 'number') ? v : null
}
const childrenKeys = policy?.childArrayKeys ?? []
const dropChildren = policy?.dropChildrenOnRootChange ?? true
const roleKey = (policy?.roleFieldKey ?? 'role') as keyof U & string
@ -136,29 +261,29 @@ export const deepDelta = <T extends Record<string, unknown>>(
return (f as PlainObject)[rootKey] === (b as PlainObject)[rootKey]
}
// Mappe id → item per lookup veloce
// id → item maps for O(1) lookups
const formMap = new Map<string | number, U>()
const backupMap = new Map<string | number, U>()
for (const item of formArray) {
const id = getId(item)
const id = resolveId(item)
if (id !== null && id !== undefined) formMap.set(id as string | number, item)
}
for (const item of backupArray) {
const id = getId(item)
const id = resolveId(item)
if (id !== null && id !== undefined) backupMap.set(id as string | number, item)
}
// 1) Gestione elementi presenti nel form
// 1) Items present in the form array
for (const formItem of formArray) {
const fid = getId(formItem)
const fid = resolveId(formItem)
// 1.a) Nuovo item (senza id)
// 1.a) New item (no identity)
if (fid === null || fid === undefined) {
const addItem = {} as DeltaArrayItem<U>
Object.assign(addItem, formItem as Partial<U>)
addItem.operations = { collectionItemOperation: PatchOperation.AddToCollection }
// ⬇️ NON droppiamo i figli su "add": li normalizziamo come AddToCollection
// normalize children as AddToCollection
for (const ck of childrenKeys) {
const v = (addItem as PlainObject)[ck]
if (Array.isArray(v)) {
@ -168,7 +293,7 @@ export const deepDelta = <T extends Record<string, unknown>>(
c.operations = { collectionItemOperation: PatchOperation.AddToCollection }
return c
})
;(addItem as PlainObject)[ck] = normalized
;(addItem as PlainObject)[ck] = normalized
}
}
@ -176,15 +301,14 @@ export const deepDelta = <T extends Record<string, unknown>>(
continue
}
// 1.b) Ha id ma non esiste nel backup ⇒ AddToCollection
// 1.b) Has identity but not in backup ⇒ AddToCollection
const backupItem = backupMap.get(fid as string | number)
if (!backupItem) {
const addItem = {} as DeltaArrayItem<U>
Object.assign(addItem, formItem as Partial<U>)
addItem.id = fid as U['id']
addItem.id = fid as U['id'] // store identity for server convenience
addItem.operations = { collectionItemOperation: PatchOperation.AddToCollection }
// ⬇️ Anche qui: manteniamo i figli, marcandoli come AddToCollection
for (const ck of childrenKeys) {
const v = (addItem as PlainObject)[ck]
if (Array.isArray(v)) {
@ -194,7 +318,7 @@ export const deepDelta = <T extends Record<string, unknown>>(
c.operations = { collectionItemOperation: PatchOperation.AddToCollection }
return c
})
;(addItem as PlainObject)[ck] = normalized
;(addItem as PlainObject)[ck] = normalized
}
}
@ -202,28 +326,24 @@ export const deepDelta = <T extends Record<string, unknown>>(
continue
}
// 1.c) Re-parenting: root cambiata
// 1.c) Re-parenting: root changed
if (!sameRoot(formItem, backupItem)) {
// REMOVE vecchio
const removeItem = {} as DeltaArrayItem<U>
removeItem.id = fid as U['id']
removeItem.operations = { collectionItemOperation: PatchOperation.RemoveFromCollection }
arrayDelta.push(removeItem)
// ADD nuovo
const addItem = {} as DeltaArrayItem<U>
Object.assign(addItem, formItem as Partial<U>)
addItem.operations = { collectionItemOperation: PatchOperation.AddToCollection }
if (dropChildren) {
// ⬇️ SOLO qui, in caso di re-parenting e se richiesto, azzera i figli
for (const ck of childrenKeys) {
if (ck in (addItem as PlainObject)) {
;(addItem as PlainObject)[ck] = []
}
}
} else {
// Mantieni i figli marcandoli come AddToCollection
for (const ck of childrenKeys) {
const v = (addItem as PlainObject)[ck]
if (Array.isArray(v)) {
@ -232,7 +352,8 @@ export const deepDelta = <T extends Record<string, unknown>>(
Object.assign(c, child as Partial<Identifiable>)
c.operations = { collectionItemOperation: PatchOperation.AddToCollection }
return c
}); (addItem as PlainObject)[ck] = normalized
})
;(addItem as PlainObject)[ck] = normalized
}
}
}
@ -241,8 +362,7 @@ export const deepDelta = <T extends Record<string, unknown>>(
continue
}
// 1.d) Ruolo → null ⇒ rimozione item (se abilitato)
// 1.d) Role → null ⇒ remove item (if enabled)
const deleteOnRoleNull = policy?.deleteItemWhenRoleRemoved ?? true
if (deleteOnRoleNull) {
const formRole = (formItem as PlainObject)[roleKey]
@ -257,14 +377,14 @@ export const deepDelta = <T extends Record<string, unknown>>(
}
}
// 1.e) Diff puntuale su campi
// 1.e) Field-level diff
const itemDeltaBase = {} as (PlainObject & OperationBag & { id?: U['id'] })
itemDeltaBase.id = fid as U['id']
calculateDelta(
formItem as PlainObject,
backupItem as PlainObject,
itemDeltaBase
formItem as PlainObject,
backupItem as PlainObject,
itemDeltaBase
)
const hasMeaningfulChanges = Object.keys(itemDeltaBase).some(k => k !== 'id')
@ -273,9 +393,9 @@ export const deepDelta = <T extends Record<string, unknown>>(
}
}
// 2) Elementi rimossi
// 2) Items removed
for (const backupItem of backupArray) {
const bid = getId(backupItem)
const bid = resolveId(backupItem)
if (bid === null || bid === undefined) continue
if (!formMap.has(bid as string | number)) {
const removeItem = {} as DeltaArrayItem<U>
@ -297,6 +417,14 @@ export const deepDelta = <T extends Record<string, unknown>>(
return delta
}
/**
* Checks whether any operations exist inside the delta.
*
* A delta has operations if:
* - parent-level operations exist, or
* - nested object deltas contain operations, or
* - any array item contains operations.
*/
export const deltaHasOperations = <T extends Record<string, unknown>>(delta: Delta<T>): boolean => {
if (!isPlainObject(delta)) return false
if ('operations' in delta && isPlainObject(delta.operations)) return true

View File

@ -0,0 +1,7 @@
// Barrel file for file utilities.
export { saveBinaryToDisk } from './saveBinaryToDisk'

View File

@ -0,0 +1,23 @@
/**
* Saves binary data to disk by creating a downloadable link.
* @param data The binary data to save (ArrayBuffer or Blob).
* @param filename The desired filename for the saved file.
*/
/**
 * Triggers a browser download of binary content via a temporary anchor element.
 * @param data The binary payload to save (ArrayBuffer or Blob).
 * @param filename The suggested name for the downloaded file.
 */
const saveBinaryToDisk = (data: ArrayBuffer | Blob, filename: string) => {
  const blob = data instanceof Blob ? data : new Blob([data])
  const objectUrl = URL.createObjectURL(blob)

  const anchor = document.createElement('a')
  anchor.href = objectUrl
  anchor.download = filename
  document.body.appendChild(anchor)
  anchor.click()
  anchor.remove()

  // Defer revocation so the browser has time to start the download.
  setTimeout(() => URL.revokeObjectURL(objectUrl), 1000)
}
export {
saveBinaryToDisk
}

View File

@ -0,0 +1,44 @@
/**
* Extracts filename from HTTP headers.
* @param headers The HTTP headers object.
* @param fallbackName The fallback filename if none found in headers.
* @return The extracted filename or the fallback name.
*/
/**
 * Extracts a filename from HTTP response headers (Content-Disposition).
 *
 * Precedence:
 * 1. RFC 5987 extended form: filename*=UTF-8''percent-encoded-name
 * 2. Quoted form: filename="name"
 * 3. Plain form: filename=name
 *
 * @param headers The HTTP headers object; keys are matched case-insensitively,
 *                since some HTTP clients preserve original header casing.
 * @param fallbackName The fallback filename if none can be extracted.
 * @return The extracted filename or the fallback name.
 */
const extractFilenameFromHeaders = (
  headers: Record<string, string>,
  fallbackName: string = 'download.bin'
): string => {
  // Fast path for lowercased keys (axios/fetch), then a case-insensitive scan.
  const cd =
    headers['content-disposition'] ??
    Object.entries(headers).find(
      ([key]) => key.toLowerCase() === 'content-disposition'
    )?.[1]

  if (!cd) {
    return fallbackName
  }

  // RFC 5987 — filename*=UTF-8''encoded-name
  const matchEncoded = /filename\*=\s*UTF-8''([^;]+)/i.exec(cd)
  if (matchEncoded && matchEncoded[1]) {
    try {
      return decodeURIComponent(matchEncoded[1])
    } catch {
      // Malformed percent-encoding: return the raw value rather than failing.
      return matchEncoded[1]
    }
  }

  // Standard — filename="quoted"
  const matchQuoted = /filename="([^"]+)"/i.exec(cd)
  if (matchQuoted && matchQuoted[1]) {
    return matchQuoted[1]
  }

  // Standard — filename=plain
  const matchPlain = /filename=([^;]+)/i.exec(cd)
  if (matchPlain && matchPlain[1]) {
    return matchPlain[1].trim()
  }

  return fallbackName
}
export {
extractFilenameFromHeaders
}

View File

@ -0,0 +1,7 @@
import {
extractFilenameFromHeaders
} from './extractFilenameFromHeaders'
export {
extractFilenameFromHeaders
}

View File

@ -31,10 +31,20 @@ import {
parseAclEntries
} from './acl'
import {
saveBinaryToDisk
} from './file'
import {
extractFilenameFromHeaders
} from './headers'
export {
// date
isValidISODateString,
formatISODateString,
// deep
deepCopy,
deepDelta,
deltaHasOperations,
@ -42,6 +52,7 @@ export {
deepMerge,
deepPatternMatch,
// enum
enumToArr,
enumToObj,
enumToString,
@ -50,8 +61,16 @@ export {
hasFlag,
hasAnyFlag,
// isGuid
isGuid,
// acl
parseAclEntry,
parseAclEntries
parseAclEntries,
// file
saveBinaryToDisk,
// headers
extractFilenameFromHeaders
}

View File

@ -5,7 +5,7 @@ namespace MaksIT.Models.LetsEncryptServer.Account.Requests;
public class PatchAccountRequest : PatchRequestModelBase {
public string Description { get; set; }
public string? Description { get; set; }
public bool? IsDisabled { get; set; }

View File

@ -11,7 +11,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="MaksIT.Core" Version="1.5.1" />
<PackageReference Include="MaksIT.Core" Version="1.5.2" />
</ItemGroup>
</Project>