diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..ed3ad45 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,43 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## v1.6.1 + +### Added +- Added `CreateMutex` method to `BaseFileLogger` +- Added `ResolveFolderPath` and `SanitizeForPath` methods to `FileLoggerProvider` +- Added `ResolveFolderPath` and `SanitizeForPath` methods to `JsonFileLoggerProvider` +- Added `LoggerPrefix` class for managing logger prefixes +- AI assisted CHANGELOG.md generation +- + +### Changed +- Improved error handling in `BaseFileLogger` + + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..33643c8 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,168 @@ +# Contributing to MaksIT.Core + +Thank you for your interest in contributing to MaksIT.Core! This document provides guidelines for contributing to the project. + +## Getting Started + +1. Fork the repository +2. Clone your fork locally +3. Create a new branch for your changes +4. Make your changes +5. Submit a pull request + +## Development Setup + +### Prerequisites + +- .NET10 SDK or later +- Git + +### Building the Project + +```bash +cd src +dotnet build MaksIT.Core.sln +``` + +### Running Tests + +```bash +cd src +dotnet test MaksIT.Core.Tests +``` + +## Commit Message Format + +This project uses the following commit message format: + +``` +(type): description +``` + +### Commit Types + +| Type | Description | +|------|-------------| +| `(feature):` | New feature or enhancement | +| `(bugfix):` | Bug fix | +| `(refactor):` | Code refactoring without functional changes | +| `(chore):` | Maintenance tasks (dependencies, CI, documentation) | + +### Examples + +``` +(feature): add support for custom JWT claims +(bugfix): fix multithreading issue in file logger +(refactor): simplify expression extension methods +(chore): update copyright year to 2026 +``` + +### Guidelines + +- Use lowercase for the description +- Keep the description concise but descriptive +- No period at the end of the description + +## Code Style + +- Follow standard C# naming conventions +- Use XML documentation comments for public APIs +- Keep methods focused and single-purpose +- Write unit tests for new functionality + +## Pull Request Process + +1. Ensure all tests pass +2. Update documentation if needed +3. Update CHANGELOG.md with your changes under the appropriate version section +4. Submit your pull request against the `main` branch + +## Versioning + +This project follows [Semantic Versioning](https://semver.org/): + +- **MAJOR** - Breaking changes +- **MINOR** - New features (backward compatible) +- **PATCH** - Bug fixes (backward compatible) + +## Release Process + +The release process is automated via PowerShell scripts in the `src/` directory. + +### Prerequisites + +- Docker Desktop running (for Linux tests) +- GitHub CLI (`gh`) authenticated +- NuGet API key in `NUGET_API_KEY` environment variable +- GitHub token in `GITHUB_MAKS_IT_COM` environment variable + +### Release Workflow + +1. **Update version** in `MaksIT.Core/MaksIT.Core.csproj` + +2. **Generate changelog** (uses AI with Ollama if available): + ```powershell + cd src + .\Generate-Changelog.ps1 # Updates CHANGELOG.md and LICENSE.md year + .\Generate-Changelog.ps1 -DryRun # Preview without changes + ``` + +3. 
**Review and commit** all changes: + ```bash + git add -A + git commit -m "(chore): release v1.x.x" + ``` + +4. **Create version tag**: + ```bash + git tag v1.x.x + ``` + +5. **Run release script**: + ```powershell + cd src + .\Release-NuGetPackage.ps1 # Full release + .\Release-NuGetPackage.ps1 -DryRun # Test without publishing + ``` + +### How Release Works + +The release script: + +1. **Reads latest version** from `CHANGELOG.md` +2. **Finds the commit** with the matching version tag (e.g., `v1.2.3`) +3. **Checks if already released** on NuGet.org - skips if yes +4. **Builds and tests** the tagged commit +5. **Publishes** to NuGet and GitHub + +You can run the release script from any branch or commit - it will always release the commit that has the version tag matching the latest changelog entry. + +### Release Script Validation + +- **Version source**: Reads latest version from `CHANGELOG.md` +- **Tag required**: Must have a tag matching the changelog version +- **Branch validation**: Tag must be on configured branch (default: `main`, set in `scriptsettings.json`) +- **Already released**: Skips if version exists on NuGet.org +- **Clean working directory**: No uncommitted changes allowed + +### What the Release Script Does + +1. Validates prerequisites and environment +2. Runs security vulnerability scan +3. Builds and tests on Windows +4. Builds and tests on Linux (via Docker) +5. Analyzes code coverage +6. Creates NuGet package +7. Pushes to NuGet.org +8. Creates GitHub release with assets + +### Re-releasing + +To re-release the same version (e.g., to fix release assets): +- Keep the same tag on the same commit +- Run the release script again +- It will delete the existing GitHub release and recreate it + +## License + +By contributing, you agree that your contributions will be licensed under the MIT License. diff --git a/LICENSE.md b/LICENSE.md index edbe7fe..915cfc6 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2024 - 2025 Maksym Sadovnychyy (MAKS-IT) +Copyright (c) 2024 - 2026 Maksym Sadovnychyy (MAKS-IT) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index aa65ae1..a99ba55 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# MaksIT.Core Library Documentation +# MaksIT.Core Library Documentation ## Table of Contents @@ -16,6 +16,7 @@ - [Logging](#logging) - [File Logger](#file-logger) - [JSON File Logger](#json-file-logger) + - [Logger Prefix](#logger-prefix) - [Threading](#threading) - [Lock Manager](#lock-manager) - [Networking](#networking) @@ -31,7 +32,10 @@ - [JWK Thumbprint Utility](#jwk-thumbprint-utility) - [JWS Generator](#jws-generator) - [TOTP Generator](#totp-generator) -- [Web API Models](#web-api-models) +- [Web API](#web-api) + - [Paged Request](#paged-request) + - [Paged Response](#paged-response) + - [Patch Operation](#patch-operation) - [Sagas](#sagas) - [CombGuidGenerator](#combguidgenerator) - [Others](#others) @@ -664,6 +668,9 @@ The `FileLogger` class in the `MaksIT.Core.Logging` namespace provides a simple 3. **Thread Safety**: - Ensures safe concurrent writes to the log file using the `LockManager`. +4. **Folder-Based Logging**: + - Organize logs into subfolders using the `LoggerPrefix` feature. + #### Example Usage ```csharp @@ -691,6 +698,9 @@ The `JsonFileLogger` class in the `MaksIT.Core.Logging` namespace provides struc 3. 
**Thread Safety**: - Ensures safe concurrent writes to the log file using the `LockManager`. +4. **Folder-Based Logging**: + - Organize logs into subfolders using the `LoggerPrefix` feature. + #### Example Usage ```csharp @@ -703,6 +713,92 @@ logger.LogInformation("Logging to JSON file!"); --- +### Logger Prefix + +The `LoggerPrefix` class in the `MaksIT.Core.Logging` namespace provides a type-safe way to specify logger categories with special prefixes. It extends the `Enumeration` base class and enables organizing logs into subfolders or applying custom categorization without using magic strings. + +#### Features + +1. **Type-Safe Prefixes**: + - Avoid magic strings by using strongly-typed prefix constants. + +2. **Folder-Based Organization**: + - Use `LoggerPrefix.Folder` to write logs to specific subfolders. + +3. **Extensible Categories**: + - Additional prefixes like `LoggerPrefix.Category` and `LoggerPrefix.Tag` are available for future use. + +4. **Automatic Parsing**: + - Parse category names to extract prefix and value using `LoggerPrefix.Parse()`. + +5. **Backward Compatible**: + - Standard `ILogger` usage remains unchanged; prefixes are only applied when explicitly used. + +#### Available Prefixes + +| Prefix | Purpose | +|--------|---------| +| `LoggerPrefix.Folder` | Writes logs to a subfolder with the specified name | +| `LoggerPrefix.Category` | Reserved for categorization (future use) | +| `LoggerPrefix.Tag` | Reserved for tagging (future use) | + +#### Example Usage + +##### Creating a Logger with a Folder Prefix +```csharp +var services = new ServiceCollection(); +services.AddLogging(builder => builder.AddFileLogger("logs", TimeSpan.FromDays(7))); + +var provider = services.BuildServiceProvider(); +var loggerFactory = provider.GetRequiredService(); + +// Create a logger that writes to logs/Audit/log_yyyy-MM-dd.txt +var auditLogger = loggerFactory.CreateLogger(LoggerPrefix.Folder.WithValue("Audit")); +auditLogger.LogInformation("Audit event occurred"); + +// Create a logger that writes to logs/Orders/log_yyyy-MM-dd.txt +var ordersLogger = loggerFactory.CreateLogger(LoggerPrefix.Folder.WithValue("Orders")); +ordersLogger.LogInformation("Order processed"); +``` + +##### Standard ILogger Usage (Unchanged) +```csharp +// Standard usage - logs go to the default folder (logs/log_yyyy-MM-dd.txt) +var logger = provider.GetRequiredService>(); +logger.LogInformation("Standard log message"); +``` + +##### Parsing a Category Name +```csharp +var categoryName = "Folder:Audit"; +var (prefix, value) = LoggerPrefix.Parse(categoryName); + +if (prefix == LoggerPrefix.Folder) { + Console.WriteLine($"Folder: {value}"); // Output: Folder: Audit +} +``` + +#### Result + +| Logger Creation | Log File Location | +|-----------------|-------------------| +| `ILogger` | `logs/log_2026-01-30.txt` | +| `CreateLogger(LoggerPrefix.Folder.WithValue("Audit"))` | `logs/Audit/log_2026-01-30.txt` | +| `CreateLogger(LoggerPrefix.Folder.WithValue("Orders"))` | `logs/Orders/log_2026-01-30.txt` | + +#### Best Practices + +1. **Use Type-Safe Prefixes**: + - Always use `LoggerPrefix.Folder.WithValue()` instead of raw strings like `"Folder:Audit"`. + +2. **Organize by Domain**: + - Use meaningful folder names to organize logs by domain (e.g., "Audit", "Orders", "Security"). + +3. **Keep Default Logging Simple**: + - Use standard `ILogger` for general application logging and folder prefixes for specialized logs. 
+ +--- + ## Threading ### Lock Manager @@ -1123,7 +1219,7 @@ using System.Security.Cryptography; using MaksIT.Core.Security.JWK; using var rsa = RSA.Create(2048); -JwkGenerator.TryGenerateFromRCA(rsa, out var jwk, out var errorMessage); +JwkGenerator.TryGenerateFromRSA(rsa, out var jwk, out var errorMessage); var result = JwkThumbprintUtility.TryGetSha256Thumbprint(jwk!, out var thumbprint, out var error); if (result) { @@ -1146,7 +1242,6 @@ if (result) else { Console.WriteLine($"Error: {error}"); - } } ``` @@ -1182,6 +1277,199 @@ public static bool TryGetKeyAuthorization( --- +### TOTP Generator + +The `TotpGenerator` class in the `MaksIT.Core.Security` namespace provides methods for generating and validating Time-based One-Time Passwords (TOTP) for two-factor authentication. + +--- + +#### Features + +1. **TOTP Validation**: + - Validate TOTP codes against a shared secret with configurable time tolerance. + +2. **TOTP Generation**: + - Generate TOTP codes from a Base32-encoded secret. + +3. **Secret Generation**: + - Generate cryptographically secure Base32 secrets for TOTP setup. + +4. **Recovery Codes**: + - Generate backup recovery codes for account recovery. + +5. **Auth Link Generation**: + - Generate `otpauth://` URIs for QR code scanning in authenticator apps. + +--- + +#### Example Usage + +##### Generating a Secret +```csharp +TotpGenerator.TryGenerateSecret(out var secret, out var error); +// secret is a Base32-encoded string for use with authenticator apps +``` + +##### Validating a TOTP Code +```csharp +var timeTolerance = 1; // Allow 1 time step before/after current +TotpGenerator.TryValidate(totpCode, secret, timeTolerance, out var isValid, out var error); +if (isValid) { + Console.WriteLine("TOTP is valid"); +} +``` + +##### Generating Recovery Codes +```csharp +TotpGenerator.TryGenerateRecoveryCodes(10, out var recoveryCodes, out var error); +// recoveryCodes contains 10 codes in format "XXXX-XXXX" +``` + +##### Generating an Auth Link for QR Code +```csharp +TotpGenerator.TryGenerateTotpAuthLink( + "MyApp", + "user@example.com", + secret, + "MyApp", + null, // algorithm (default SHA1) + null, // digits (default 6) + null, // period (default 30) + out var authLink, + out var error +); +// authLink = "otpauth://totp/MyApp:user@example.com?secret=...&issuer=MyApp" +``` + +--- + +## Web API + +The `Webapi` namespace provides models and utilities for building Web APIs, including pagination support and patch operations. + +--- + +### Paged Request + +The `PagedRequest` class in the `MaksIT.Core.Webapi.Models` namespace provides a base class for paginated API requests with filtering and sorting capabilities. + +#### Features + +1. **Pagination**: + - Configure page size and page number for paginated results. + +2. **Dynamic Filtering**: + - Build filter expressions from string-based filter queries. + +3. **Dynamic Sorting**: + - Build sort expressions with ascending/descending order. 
+ +#### Properties + +| Property | Type | Default | Description | +|----------|------|---------|-------------| +| `PageSize` | `int` | `100` | Number of items per page | +| `PageNumber` | `int` | `1` | Current page number | +| `Filters` | `string?` | `null` | Filter expression string | +| `SortBy` | `string?` | `null` | Property name to sort by | +| `IsAscending` | `bool` | `true` | Sort direction | + +#### Example Usage + +```csharp +var request = new PagedRequest { + PageSize = 20, + PageNumber = 1, + Filters = "Name.Contains(\"John\") && Age > 18", + SortBy = "Name", + IsAscending = true +}; + +var filterExpression = request.BuildFilterExpression(); +var sortExpression = request.BuildSortExpression(); + +var results = dbContext.Users + .Where(filterExpression) + .OrderBy(sortExpression) + .Skip((request.PageNumber - 1) * request.PageSize) + .Take(request.PageSize) + .ToList(); +``` + +--- + +### Paged Response + +The `PagedResponse` class in the `MaksIT.Core.Webapi.Models` namespace provides a generic wrapper for paginated API responses. + +#### Properties + +| Property | Type | Description | +|----------|------|-------------| +| `Items` | `IEnumerable` | The items for the current page | +| `PageNumber` | `int` | Current page number | +| `PageSize` | `int` | Number of items per page | +| `TotalCount` | `int` | Total number of items across all pages | +| `TotalPages` | `int` | Calculated total number of pages | +| `HasPreviousPage` | `bool` | Whether a previous page exists | +| `HasNextPage` | `bool` | Whether a next page exists | + +#### Example Usage + +```csharp +var items = await dbContext.Users + .Skip((pageNumber - 1) * pageSize) + .Take(pageSize) + .ToListAsync(); + +var totalCount = await dbContext.Users.CountAsync(); + +var response = new PagedResponse(items, totalCount, pageNumber, pageSize); + +// response.TotalPages, response.HasNextPage, etc. are automatically calculated +``` + +--- + +### Patch Operation + +The `PatchOperation` enum in the `MaksIT.Core.Webapi.Models` namespace defines operations for partial updates (PATCH requests). + +#### Values + +| Value | Description | +|-------|-------------| +| `SetField` | Set or replace a normal field value | +| `RemoveField` | Set a field to null | +| `AddToCollection` | Add an item to a collection property | +| `RemoveFromCollection` | Remove an item from a collection property | + +#### Example Usage + +```csharp +public class UserPatchRequest : PatchRequestModelBase { + public PatchOperation Operation { get; set; } + public string PropertyName { get; set; } + public object? Value { get; set; } +} + +// Example: Set a field +var patch = new UserPatchRequest { + Operation = PatchOperation.SetField, + PropertyName = "Name", + Value = "New Name" +}; + +// Example: Add to collection +var patch = new UserPatchRequest { + Operation = PatchOperation.AddToCollection, + PropertyName = "Roles", + Value = "Admin" +}; +``` + +--- + ## Others ### Culture diff --git a/src/BuildUtils.psm1 b/src/BuildUtils.psm1 new file mode 100644 index 0000000..835619f --- /dev/null +++ b/src/BuildUtils.psm1 @@ -0,0 +1,1054 @@ +<# +.SYNOPSIS + Build utilities module for PowerShell scripts. + +.DESCRIPTION + Provides reusable functions for build/release scripts: + - Step timing and progress tracking + - Prerequisite/command validation + - Git status utilities + - Console output helpers + +.USAGE + Import-Module .\BuildUtils.psm1 + + Initialize-StepTimer + Start-Step "Building project" + # ... do work ... 
+ Complete-Step "OK" + Show-TimingSummary +#> + +# ============================================================================== +# MODULE STATE +# ============================================================================== + +$script:StepTimerState = @{ + TotalStopwatch = $null + CurrentStep = $null + StepTimings = @() +} + +# ============================================================================== +# STEP TIMING FUNCTIONS +# ============================================================================== + +function Initialize-StepTimer { + <# + .SYNOPSIS + Initialize the step timer. Call at the start of your script. + #> + $script:StepTimerState.TotalStopwatch = [System.Diagnostics.Stopwatch]::StartNew() + $script:StepTimerState.StepTimings = @() + $script:StepTimerState.CurrentStep = $null +} + +function Start-Step { + <# + .SYNOPSIS + Start timing a new step with console output. + .PARAMETER Name + Name/description of the step. + #> + param([Parameter(Mandatory)][string]$Name) + + $script:StepTimerState.CurrentStep = @{ + Name = $Name + Stopwatch = [System.Diagnostics.Stopwatch]::StartNew() + } + Write-Host "" + Write-Host "[$([DateTime]::Now.ToString('HH:mm:ss'))] $Name..." -ForegroundColor Cyan +} + +function Complete-Step { + <# + .SYNOPSIS + Complete the current step and record timing. + .PARAMETER Status + Status of the step: OK, SKIP, FAIL, WARN + #> + param([string]$Status = "OK") + + $step = $script:StepTimerState.CurrentStep + if ($step) { + $step.Stopwatch.Stop() + $elapsed = $step.Stopwatch.Elapsed + $script:StepTimerState.StepTimings += @{ + Name = $step.Name + Duration = $elapsed + Status = $Status + } + + $timeStr = "{0:mm\:ss\.fff}" -f $elapsed + $color = switch ($Status) { + "OK" { "Green" } + "SKIP" { "Yellow" } + "WARN" { "Yellow" } + default { "Red" } + } + + if ($Status -eq "SKIP") { + Write-Host " Skipped" -ForegroundColor $color + } + else { + $prefix = if ($Status -eq "OK") { "Completed" } else { "Failed" } + Write-Host " $prefix in $timeStr" -ForegroundColor $color + } + } +} + +function Get-StepTimings { + <# + .SYNOPSIS + Get the recorded step timings. + .OUTPUTS + Array of step timing objects. + #> + return $script:StepTimerState.StepTimings +} + +function Show-TimingSummary { + <# + .SYNOPSIS + Display a summary of all step timings. + #> + Write-Host "" + Write-Host "==========================================" + Write-Host "TIMING SUMMARY" + Write-Host "==========================================" + + foreach ($step in $script:StepTimerState.StepTimings) { + $timeStr = "{0:mm\:ss\.fff}" -f $step.Duration + $status = $step.Status + $color = switch ($status) { + "OK" { "Green" } + "SKIP" { "Yellow" } + "WARN" { "Yellow" } + default { "Red" } + } + Write-Host (" [{0,-4}] {1,-40} {2}" -f $status, $step.Name, $timeStr) -ForegroundColor $color + } + + if ($script:StepTimerState.TotalStopwatch) { + $script:StepTimerState.TotalStopwatch.Stop() + $totalTime = "{0:mm\:ss\.fff}" -f $script:StepTimerState.TotalStopwatch.Elapsed + Write-Host "----------------------------------------" + Write-Host " Total: $totalTime" -ForegroundColor Cyan + } +} + +# ============================================================================== +# PREREQUISITE FUNCTIONS +# ============================================================================== + +function Test-CommandExists { + <# + .SYNOPSIS + Check if a command exists. + .PARAMETER Command + Command name to check. + .OUTPUTS + Boolean indicating if command exists. 
+ #> + param([Parameter(Mandatory)][string]$Command) + return [bool](Get-Command $Command -ErrorAction SilentlyContinue) +} + +function Assert-Command { + <# + .SYNOPSIS + Assert that a command exists, exit if not. + .PARAMETER Command + Command name to check. + .PARAMETER ExitCode + Exit code to use if command is missing (default: 1). + #> + param( + [Parameter(Mandatory)][string]$Command, + [int]$ExitCode = 1 + ) + + if (-not (Test-CommandExists $Command)) { + Write-Error "Required command '$Command' is not available. Aborting." + exit $ExitCode + } +} + +function Assert-Commands { + <# + .SYNOPSIS + Assert that multiple commands exist. + .PARAMETER Commands + Array of command names to check. + #> + param([Parameter(Mandatory)][string[]]$Commands) + + foreach ($cmd in $Commands) { + Assert-Command $cmd + } +} + +function Test-EnvironmentVariable { + <# + .SYNOPSIS + Check if an environment variable is set. + .PARAMETER Name + Environment variable name. + .OUTPUTS + Boolean indicating if variable is set and not empty. + #> + param([Parameter(Mandatory)][string]$Name) + + $value = [Environment]::GetEnvironmentVariable($Name) + return -not [string]::IsNullOrWhiteSpace($value) +} + +function Assert-EnvironmentVariable { + <# + .SYNOPSIS + Assert that an environment variable is set, exit if not. + .PARAMETER Name + Environment variable name. + .PARAMETER ExitCode + Exit code to use if variable is missing (default: 1). + #> + param( + [Parameter(Mandatory)][string]$Name, + [int]$ExitCode = 1 + ) + + if (-not (Test-EnvironmentVariable $Name)) { + Write-Error "Required environment variable '$Name' is not set. Aborting." + exit $ExitCode + } +} + +# ============================================================================== +# GIT UTILITIES +# ============================================================================== + +function Get-GitStatus { + <# + .SYNOPSIS + Get git status as structured object. + .OUTPUTS + Object with Staged, Modified, Untracked arrays and IsClean boolean. + #> + $status = @{ + Staged = @() + Modified = @() + Untracked = @() + Deleted = @() + IsClean = $true + } + + $statusLines = git status --porcelain 2>$null + if (-not $statusLines) { return $status } + + $status.IsClean = $false + + foreach ($line in ($statusLines -split "`n")) { + if ([string]::IsNullOrWhiteSpace($line)) { continue } + + $index = $line.Substring(0, 1) + $workTree = $line.Substring(1, 1) + $file = $line.Substring(3) + + # Staged changes + if ($index -match '[MADRC]') { + $status.Staged += $file + } + # Unstaged modifications + if ($workTree -eq 'M') { + $status.Modified += $file + } + # Deleted files + if ($index -eq 'D' -or $workTree -eq 'D') { + $status.Deleted += $file + } + # Untracked files + if ($index -eq '?' -and $workTree -eq '?') { + $status.Untracked += $file + } + } + + return $status +} + +function Show-GitStatus { + <# + .SYNOPSIS + Display git status in a formatted, colored output. + .PARAMETER Status + Git status object from Get-GitStatus (optional, will fetch if not provided). 
+ #> + param([hashtable]$Status) + + if (-not $Status) { + $Status = Get-GitStatus + } + + if ($Status.IsClean) { + Write-Host " Working directory is clean" -ForegroundColor Green + return + } + + if ($Status.Staged.Count -gt 0) { + Write-Host " Staged ($($Status.Staged.Count)):" -ForegroundColor Green + $Status.Staged | ForEach-Object { Write-Host " + $_" -ForegroundColor Green } + } + if ($Status.Modified.Count -gt 0) { + Write-Host " Modified ($($Status.Modified.Count)):" -ForegroundColor Yellow + $Status.Modified | ForEach-Object { Write-Host " M $_" -ForegroundColor Yellow } + } + if ($Status.Deleted.Count -gt 0) { + Write-Host " Deleted ($($Status.Deleted.Count)):" -ForegroundColor Red + $Status.Deleted | ForEach-Object { Write-Host " D $_" -ForegroundColor Red } + } + if ($Status.Untracked.Count -gt 0) { + Write-Host " Untracked ($($Status.Untracked.Count)):" -ForegroundColor Cyan + $Status.Untracked | ForEach-Object { Write-Host " ? $_" -ForegroundColor Cyan } + } +} + +function Get-CurrentBranch { + <# + .SYNOPSIS + Get the current git branch name. + .OUTPUTS + Branch name string or $null if not in a git repo. + #> + try { + $branch = git rev-parse --abbrev-ref HEAD 2>$null + if ($LASTEXITCODE -eq 0) { return $branch } + } catch { } + return $null +} + +function Get-LastTag { + <# + .SYNOPSIS + Get the most recent git tag. + .OUTPUTS + Tag name string or $null if no tags exist. + #> + try { + $tag = git describe --tags --abbrev=0 2>$null + if ($LASTEXITCODE -eq 0) { return $tag } + } catch { } + return $null +} + +function Get-CommitsSinceTag { + <# + .SYNOPSIS + Get commits since a specific tag (or all commits if no tag). + .PARAMETER Tag + Tag to start from (optional, uses last tag if not specified). + .PARAMETER Format + Output format: oneline, full, hash (default: oneline). + .OUTPUTS + Array of commit strings. + #> + param( + [string]$Tag, + [ValidateSet("oneline", "full", "hash")] + [string]$Format = "oneline" + ) + + if (-not $Tag) { + $Tag = Get-LastTag + } + + $formatArg = switch ($Format) { + "oneline" { "--oneline" } + "full" { "--format=full" } + "hash" { "--format=%H" } + } + + try { + if ($Tag) { + $commits = git log "$Tag..HEAD" $formatArg --no-merges 2>$null + } + else { + $commits = git log -50 $formatArg --no-merges 2>$null + } + + if ($commits) { + return $commits -split "`n" | Where-Object { $_.Trim() -ne "" } + } + } catch { } + + return @() +} + +function Get-VersionBumpCommit { + <# + .SYNOPSIS + Find the commit where a version string was introduced in a file. + .PARAMETER Version + Version string to search for (e.g., "1.6.1"). + .PARAMETER FilePath + File path to search in (e.g., "*.csproj"). + .OUTPUTS + Commit hash where version first appeared, or $null. + #> + param( + [Parameter(Mandatory)][string]$Version, + [string]$FilePath = "*.csproj" + ) + + try { + # Find commit that introduced this version string + $commit = git log -S "$Version" --format="%H" --reverse -- $FilePath 2>$null | Select-Object -First 1 + if ($commit) { return $commit.Trim() } + + # Try alternative format (without tags) + $commit = git log -S "$Version" --format="%H" --reverse -- $FilePath 2>$null | Select-Object -First 1 + if ($commit) { return $commit.Trim() } + } catch { } + + return $null +} + +function Get-CommitsForVersion { + <# + .SYNOPSIS + Get commits for a specific version (from previous version to HEAD). + Designed for pre-commit workflow: version is bumped locally but not yet committed. 
+ .PARAMETER Version + Version string (e.g., "1.6.1") - the NEW version being prepared. + .PARAMETER CsprojPath + Path to csproj file (absolute or relative). + .PARAMETER Format + Output format: oneline, full, hash, detailed (default: oneline). + "detailed" includes commit message + changed files. + .OUTPUTS + Array of commit strings for this version. + #> + param( + [Parameter(Mandatory)][string]$Version, + [string]$CsprojPath = "*.csproj", + [ValidateSet("oneline", "full", "hash", "detailed")] + [string]$Format = "oneline" + ) + + # Get git repo root for path conversion + $gitRoot = (git rev-parse --show-toplevel 2>$null) + if ($gitRoot) { $gitRoot = $gitRoot.Trim() } + + # Get path relative to git root using git itself (handles drive letter issues) + function ConvertTo-GitPath { + param([string]$Path) + if (-not $Path) { return $null } + + # If it's a relative path, use it directly + if (-not [System.IO.Path]::IsPathRooted($Path)) { + # Get path relative to repo root by combining with current dir offset + $cwdRelative = git rev-parse --show-prefix 2>$null + if ($cwdRelative) { + $cwdRelative = $cwdRelative.Trim().TrimEnd('/') + if ($cwdRelative) { + return "$cwdRelative/$($Path -replace '\\', '/')" + } + } + return $Path -replace '\\', '/' + } + + # For absolute paths, try to make relative using git + Push-Location (Split-Path $Path -Parent) -ErrorAction SilentlyContinue + try { + $prefix = git rev-parse --show-prefix 2>$null + if ($prefix) { + $prefix = $prefix.Trim().TrimEnd('/') + $filename = Split-Path $Path -Leaf + if ($prefix) { + return "$prefix/$filename" + } + return $filename + } + } + finally { + Pop-Location -ErrorAction SilentlyContinue + } + + # Fallback: normalize to forward slashes + return $Path -replace '\\', '/' + } + + # Find actual csproj file if glob pattern + $actualCsprojPath = $CsprojPath + if ($CsprojPath -match '\*') { + $found = Get-ChildItem -Path $CsprojPath -Recurse -ErrorAction SilentlyContinue | + Where-Object { $_.Name -match '\.csproj$' } | + Select-Object -First 1 + if ($found) { $actualCsprojPath = $found.FullName } + } + + $gitCsprojPath = ConvertTo-GitPath $actualCsprojPath + + # Determine commit range + $range = "" + try { + # Check if this version is already committed + $versionCommit = Get-VersionBumpCommit -Version $Version -FilePath $CsprojPath + + if ($versionCommit) { + # Version already in git history - get commits from that point + $range = "$versionCommit^..HEAD" + } + else { + # Version NOT committed yet (normal pre-commit workflow) + # Find the PREVIOUS version from the committed csproj + if ($gitCsprojPath) { + $committedContent = git show "HEAD:$gitCsprojPath" 2>$null + if ($committedContent) { + $prevVersionMatch = [regex]::Match(($committedContent -join "`n"), '([^<]+)') + if ($prevVersionMatch.Success) { + $prevVersion = $prevVersionMatch.Groups[1].Value + # Find when previous version was introduced + $prevCommit = Get-VersionBumpCommit -Version $prevVersion -FilePath $CsprojPath + if ($prevCommit) { + # Get commits AFTER previous version was set (these are unreleased) + $range = "$prevCommit..HEAD" + } + } + } + } + + # Fallback to last tag if still no range + if (-not $range) { + $lastTag = Get-LastTag + if ($lastTag) { + $range = "$lastTag..HEAD" + } + } + } + } catch { } + + + # For detailed format, get commit + files changed + if ($Format -eq "detailed") { + return Get-DetailedCommits -Range $range + } + + $formatArg = switch ($Format) { + "oneline" { "--oneline" } + "full" { "--format=full" } + "hash" { "--format=%H" } + } + 
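+  # At this point $range is one of (values illustrative):
+  #   "<versionCommit>^..HEAD"    - version already committed; include its commit
+  #   "<prevVersionCommit>..HEAD" - pre-commit workflow; commits since the previous version bump
+  #   "<lastTag>..HEAD"           - fallback when no version bump commit was found
+  #   ""                          - no reference point; the last 30 commits are used below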
+ try { + if ($range) { + $commits = git log $range $formatArg --no-merges 2>$null + } + else { + $commits = git log -30 $formatArg --no-merges 2>$null + } + + if ($commits) { + return $commits -split "`n" | Where-Object { $_.Trim() -ne "" } + } + } catch { } + + return @() +} + +function Get-DetailedCommits { + <# + .SYNOPSIS + Get detailed commit info including changed files. + .PARAMETER Range + Git commit range (e.g., "v1.0.0..HEAD"). + .PARAMETER MaxCommits + Maximum commits to return (default: 50). + .OUTPUTS + Array of formatted strings: "hash message [files: file1, file2, ...]" + #> + param( + [string]$Range, + [int]$MaxCommits = 50 + ) + + $results = @() + + try { + # Get commit hashes + if ($Range) { + $hashes = git log $Range --format="%H" --no-merges -n $MaxCommits 2>$null + } + else { + $hashes = git log --format="%H" --no-merges -n $MaxCommits 2>$null + } + + if (-not $hashes) { return @() } + + $hashArray = $hashes -split "`n" | Where-Object { $_.Trim() -ne "" } + + foreach ($hash in $hashArray) { + $hash = $hash.Trim() + if (-not $hash) { continue } + + # Get commit message (first line) + $message = git log -1 --format="%s" $hash 2>$null + if (-not $message) { continue } + + # Get changed files (source files only) + $files = git diff-tree --no-commit-id --name-only -r $hash 2>$null + $sourceFiles = @() + if ($files) { + $sourceFiles = ($files -split "`n" | Where-Object { + $_.Trim() -ne "" -and ($_ -match '\.(cs|fs|vb|ts|js|py|java|go|rs|cpp|c|h)$') + }) | Select-Object -First 5 # Limit to 5 files per commit + } + + # Format output + $shortHash = $hash.Substring(0, 7) + if ($sourceFiles.Count -gt 0) { + $fileList = $sourceFiles -join ", " + $results += "$shortHash $message [files: $fileList]" + } + else { + $results += "$shortHash $message" + } + } + } catch { } + + return $results +} + +function Get-UncommittedChanges { + <# + .SYNOPSIS + Get summary of uncommitted changes (staged, unstaged, untracked) with meaningful descriptions. + .PARAMETER IncludeContent + If true, includes file content diffs (for AI analysis). + .PARAMETER FileFilter + File extension filter (default: .cs for C# files). + .OUTPUTS + Object with Staged, Modified, Untracked arrays and Summary with change descriptions. 
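+  .EXAMPLE
+    # Summarize pending C# changes (the default filter is ".cs"):
+    (Get-UncommittedChanges -FileFilter '.cs').Summary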
+ #> + param( + [switch]$IncludeContent, + [string]$FileFilter = ".cs" + ) + + $result = @{ + Staged = @() + Modified = @() + Untracked = @() + Deleted = @() + Summary = @() + } + + try { + # Get current directory prefix relative to repo root + $cwdPrefix = git rev-parse --show-prefix 2>$null + if ($cwdPrefix) { $cwdPrefix = $cwdPrefix.Trim().TrimEnd('/') } + + # Get all changes using git status porcelain + $status = git status --porcelain 2>$null + if (-not $status) { return $result } + + $statusLines = $status -split "`n" | Where-Object { $_.Trim() -ne "" } + + foreach ($line in $statusLines) { + if ($line.Length -lt 3) { continue } + $statusCode = $line.Substring(0, 2) + $filePath = $line.Substring(3).Trim().Trim('"') + + # Filter by extension + if ($FileFilter -and -not $filePath.EndsWith($FileFilter)) { continue } + + # Convert repo-relative path to cwd-relative path for git diff + $diffPath = $filePath + if ($cwdPrefix -and $filePath.StartsWith("$cwdPrefix/")) { + $diffPath = $filePath.Substring($cwdPrefix.Length + 1) + } + + # Categorize by status (store both paths) + $pathInfo = @{ Full = $filePath; Diff = $diffPath } + if ($statusCode -match '^\?\?') { + $result.Untracked += $pathInfo + } elseif ($statusCode -match '^D' -or $statusCode -match '^.D') { + # Deleted files (staged or unstaged) + $result.Deleted += $pathInfo + } elseif ($statusCode -match '^[MARC]') { + $result.Staged += $pathInfo + } elseif ($statusCode -match '^.[MARC]') { + $result.Modified += $pathInfo + } + } + + # Process modified/staged files (get diff) + $allModified = @($result.Staged) + @($result.Modified) + foreach ($fileInfo in $allModified) { + if (-not $fileInfo) { continue } + + $diffPath = $fileInfo.Diff + $fullPath = $fileInfo.Full + + $diff = git diff -- $diffPath 2>$null + if (-not $diff) { $diff = git diff --cached -- $diffPath 2>$null } + + $fileName = Split-Path $fullPath -Leaf + $className = $fileName -replace '\.cs$', '' + + if ($diff) { + $changes = @() + $diffLines = $diff -split "`n" + + foreach ($line in $diffLines) { + # Added method/class/property + if ($line -match '^\+\s*(public|private|protected|internal)\s+static\s+\w+\s+(\w+)\s*\(') { + $changes += "Added static method $($Matches[2])" + } + elseif ($line -match '^\+\s*(public|private|protected|internal)\s+\w+\s+(\w+)\s*\([^)]*\)\s*\{?') { + $methodName = $Matches[2] + if ($methodName -notmatch '^(get|set|if|for|while|switch|new|return)$') { + $changes += "Added method $methodName" + } + } + elseif ($line -match '^\+\s*(public|private|protected|internal)\s+(class|interface|struct|enum)\s+(\w+)') { + $changes += "Added $($Matches[2]) $($Matches[3])" + } + # Detect try/catch blocks, error handling + elseif ($line -match '^\+.*catch\s*\(') { + $changes += "Added exception handling" + } + } + + $changes = $changes | Select-Object -Unique | Select-Object -First 4 + + if ($changes.Count -gt 0) { + $result.Summary += "(uncommitted) $className`: $($changes -join ', ')" + } + else { + # Fallback to line count + $addCount = ($diffLines | Where-Object { $_ -match '^\+[^+]' }).Count + $delCount = ($diffLines | Where-Object { $_ -match '^-[^-]' }).Count + $result.Summary += "(uncommitted) $className`: Modified (+$addCount/-$delCount lines)" + } + } + else { + $result.Summary += "(uncommitted) $className`: Modified" + } + } + + # Process untracked files (new files) + foreach ($fileInfo in $result.Untracked) { + if (-not $fileInfo) { continue } + + $diffPath = $fileInfo.Diff + $fullPath = $fileInfo.Full + + $fileName = Split-Path $fullPath -Leaf + 
$className = $fileName -replace '\.cs$', '' + + # Read file to understand what it contains + $content = Get-Content $diffPath -Raw -ErrorAction SilentlyContinue + if ($content) { + $features = @() + + if ($content -match 'class\s+(\w+)') { $features += "class $($Matches[1])" } + if ($content -match 'interface\s+(\w+)') { $features += "interface $($Matches[1])" } + if ($content -match 'enum\s+(\w+)') { $features += "enum $($Matches[1])" } + if ($content -match '\[Fact\]|\[Theory\]') { $features += "unit tests" } + + if ($features.Count -gt 0) { + $result.Summary += "(new file) $className`: Added $($features -join ', ')" + } + else { + $result.Summary += "(new file) $className`: New file" + } + } + else { + $result.Summary += "(new file) $className`: New file" + } + } + + # Process deleted files + foreach ($fileInfo in $result.Deleted) { + if (-not $fileInfo) { continue } + + $fullPath = $fileInfo.Full + $fileName = Split-Path $fullPath -Leaf + $className = $fileName -replace '\.cs$', '' + + $result.Summary += "(deleted) $className`: Removed" + } + + if ($IncludeContent) { + $result.DiffContent = git diff --cached 2>$null + if (-not $result.DiffContent) { $result.DiffContent = git diff 2>$null } + } + } catch { } + + return $result +} + +function Get-CommitChangesAnalysis { + <# + .SYNOPSIS + Analyze commits in a range and extract meaningful changes from diffs. + .PARAMETER Version + Version string to find commits for. + .PARAMETER CsprojPath + Path to csproj file. + .PARAMETER FileFilter + File extension filter (default: .cs). + .OUTPUTS + Array of change summary strings (like Get-UncommittedChanges). + #> + param( + [Parameter(Mandatory)][string]$Version, + [string]$CsprojPath = "*.csproj", + [string]$FileFilter = ".cs" + ) + + $summaries = @() + + try { + # Get commit range for this version + $range = "" + + # Find csproj + $actualCsprojPath = $CsprojPath + if ($CsprojPath -match '\*') { + $found = Get-ChildItem -Path $CsprojPath -Recurse -ErrorAction SilentlyContinue | + Where-Object { $_.Name -match '\.csproj$' } | + Select-Object -First 1 + if ($found) { $actualCsprojPath = $found.FullName } + } + + # Get git path + $cwdPrefix = git rev-parse --show-prefix 2>$null + if ($cwdPrefix) { $cwdPrefix = $cwdPrefix.Trim().TrimEnd('/') } + + $gitCsprojPath = $actualCsprojPath -replace '\\', '/' + if ([System.IO.Path]::IsPathRooted($actualCsprojPath)) { + Push-Location (Split-Path $actualCsprojPath -Parent) -ErrorAction SilentlyContinue + try { + $prefix = git rev-parse --show-prefix 2>$null + if ($prefix) { + $prefix = $prefix.Trim().TrimEnd('/') + $filename = Split-Path $actualCsprojPath -Leaf + $gitCsprojPath = if ($prefix) { "$prefix/$filename" } else { $filename } + } + } finally { Pop-Location -ErrorAction SilentlyContinue } + } + + # Determine commit range + $versionCommit = Get-VersionBumpCommit -Version $Version -FilePath $CsprojPath + + if ($versionCommit) { + $range = "$versionCommit^..HEAD" + } + else { + # Version not committed - find previous version + $committedContent = git show "HEAD:$gitCsprojPath" 2>$null + if ($committedContent) { + $prevVersionMatch = [regex]::Match(($committedContent -join "`n"), '([^<]+)') + if ($prevVersionMatch.Success) { + $prevVersion = $prevVersionMatch.Groups[1].Value + $prevCommit = Get-VersionBumpCommit -Version $prevVersion -FilePath $CsprojPath + if ($prevCommit) { + $range = "$prevCommit..HEAD" + } + } + } + + if (-not $range) { + $lastTag = Get-LastTag + if ($lastTag) { $range = "$lastTag..HEAD" } + } + } + + if (-not $range) { return @() } + + 
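+    # The loop below emits entries shaped like (illustrative):
+    #   "(commit abc1234) JsonFileLoggerProvider: Added method ResolveFolderPath, Added exception handling"
+    # and falls back to the raw commit subject when no structural change is detected in the diff.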
# Get commits + $hashes = git log $range --format="%H" --no-merges -n 30 2>$null + if (-not $hashes) { return @() } + + $hashArray = $hashes -split "`n" | Where-Object { $_.Trim() -ne "" } + + foreach ($hash in $hashArray) { + $hash = $hash.Trim() + if (-not $hash) { continue } + + $message = git log -1 --format="%s" $hash 2>$null + $shortHash = $hash.Substring(0, 7) + + # Get files changed in this commit + $files = git diff-tree --no-commit-id --name-only -r $hash 2>$null + if (-not $files) { continue } + + $sourceFiles = $files -split "`n" | Where-Object { + $_.Trim() -ne "" -and $_.EndsWith($FileFilter) + } + + foreach ($file in $sourceFiles) { + $file = $file.Trim() + if (-not $file) { continue } + + $fileName = Split-Path $file -Leaf + $className = $fileName -replace '\.cs$', '' + + # Get diff for this file in this commit + $diff = git show $hash --format="" -- $file 2>$null + + if ($diff) { + $changes = @() + $diffLines = $diff -split "`n" + + foreach ($line in $diffLines) { + # Added method/class/property + if ($line -match '^\+\s*(public|private|protected|internal)\s+static\s+\w+\s+(\w+)\s*\(') { + $changes += "Added static method $($Matches[2])" + } + elseif ($line -match '^\+\s*(public|private|protected|internal)\s+\w+\s+(\w+)\s*\([^)]*\)\s*\{?') { + $methodName = $Matches[2] + if ($methodName -notmatch '^(get|set|if|for|while|switch|new|return)$') { + $changes += "Added method $methodName" + } + } + elseif ($line -match '^\+\s*(public|private|protected|internal)\s+(class|interface|struct|enum)\s+(\w+)') { + $changes += "Added $($Matches[2]) $($Matches[3])" + } + # Removed + elseif ($line -match '^-\s*(public|private|protected|internal)\s+(class|interface|struct|enum)\s+(\w+)') { + $changes += "Removed $($Matches[2]) $($Matches[3])" + } + elseif ($line -match '^-\s*(public|private|protected|internal)\s+\w+\s+(\w+)\s*\([^)]*\)\s*\{?') { + $methodName = $Matches[2] + if ($methodName -notmatch '^(get|set|if|for|while|switch|new|return)$') { + $changes += "Removed method $methodName" + } + } + # Exception handling + elseif ($line -match '^\+.*catch\s*\(') { + $changes += "Added exception handling" + } + } + + $changes = $changes | Select-Object -Unique | Select-Object -First 4 + + if ($changes.Count -gt 0) { + $summaries += "(commit $shortHash) $className`: $($changes -join ', ')" + } + } + } + + # Also add commit message context if no detailed changes found for any file + if (-not ($summaries | Where-Object { $_ -match $shortHash })) { + $summaries += "(commit $shortHash) $message" + } + } + } catch { } + + return $summaries | Select-Object -Unique +} + +# ============================================================================== +# CONSOLE OUTPUT HELPERS +# ============================================================================== + +function Write-Banner { + <# + .SYNOPSIS + Write a banner/header to console. + .PARAMETER Title + Banner title text. + .PARAMETER Width + Banner width (default: 50). + .PARAMETER Color + Text color (default: Cyan). + #> + param( + [Parameter(Mandatory)][string]$Title, + [int]$Width = 50, + [string]$Color = "Cyan" + ) + + $border = "=" * $Width + Write-Host "" + Write-Host $border -ForegroundColor $Color + Write-Host $Title -ForegroundColor $Color + Write-Host $border -ForegroundColor $Color + Write-Host "" +} + +function Write-Success { + <# + .SYNOPSIS + Write a success message. 
+ #> + param([Parameter(Mandatory)][string]$Message) + Write-Host $Message -ForegroundColor Green +} + +function Write-Warning { + <# + .SYNOPSIS + Write a warning message. + #> + param([Parameter(Mandatory)][string]$Message) + Write-Host "WARNING: $Message" -ForegroundColor Yellow +} + +function Write-Failure { + <# + .SYNOPSIS + Write a failure/error message. + #> + param([Parameter(Mandatory)][string]$Message) + Write-Host "ERROR: $Message" -ForegroundColor Red +} + +function Write-Info { + <# + .SYNOPSIS + Write an info message. + #> + param([Parameter(Mandatory)][string]$Message) + Write-Host $Message -ForegroundColor Gray +} + +# ============================================================================== +# MODULE EXPORTS +# ============================================================================== + +Export-ModuleMember -Function @( + # Step Timing + 'Initialize-StepTimer' + 'Start-Step' + 'Complete-Step' + 'Get-StepTimings' + 'Show-TimingSummary' + + # Prerequisites + 'Test-CommandExists' + 'Assert-Command' + 'Assert-Commands' + 'Test-EnvironmentVariable' + 'Assert-EnvironmentVariable' + + # Git Utilities + 'Get-GitStatus' + 'Show-GitStatus' + 'Get-CurrentBranch' + 'Get-LastTag' + 'Get-CommitsSinceTag' + 'Get-VersionBumpCommit' + 'Get-CommitsForVersion' + 'Get-DetailedCommits' + 'Get-UncommittedChanges' + 'Get-CommitChangesAnalysis' + + # Console Output + 'Write-Banner' + 'Write-Success' + 'Write-Warning' + 'Write-Failure' + 'Write-Info' +) diff --git a/src/Force-AmendTaggedCommit.bat b/src/Force-AmendTaggedCommit.bat new file mode 100644 index 0000000..616d358 --- /dev/null +++ b/src/Force-AmendTaggedCommit.bat @@ -0,0 +1,6 @@ +@echo off +setlocal + +powershell.exe -NoProfile -ExecutionPolicy Bypass -File "%~dp0Force-AmendTaggedCommit.ps1" + +pause \ No newline at end of file diff --git a/src/Force-AmendTaggedCommit.ps1 b/src/Force-AmendTaggedCommit.ps1 new file mode 100644 index 0000000..785f494 --- /dev/null +++ b/src/Force-AmendTaggedCommit.ps1 @@ -0,0 +1,200 @@ +<# +.SYNOPSIS + Amends the latest commit, recreates its associated tag, and force pushes both to remote. + +.DESCRIPTION + This script performs the following operations: + 1. Gets the last commit and verifies it has an associated tag + 2. Stages all pending changes + 3. Amends the latest commit (keeps existing message) + 4. Deletes and recreates the tag on the amended commit + 5. Force pushes the branch and tag to origin + +.PARAMETER DryRun + If specified, shows what would be done without making changes. + +.EXAMPLE + .\Force-AmendTaggedCommit.ps1 + +.EXAMPLE + .\Force-AmendTaggedCommit.ps1 -DryRun +#> + +[CmdletBinding()] +param( + [Parameter(Mandatory = $false)] + [switch]$DryRun +) + +$ErrorActionPreference = "Stop" + +function Write-Step { + param([string]$Text) + Write-Host "`n>> $Text" -ForegroundColor Cyan +} + +function Write-Success { + param([string]$Text) + Write-Host " $Text" -ForegroundColor Green +} + +function Write-Info { + param([string]$Text) + Write-Host " $Text" -ForegroundColor Gray +} + +function Write-Warn { + param([string]$Text) + Write-Host " $Text" -ForegroundColor Yellow +} + +try { + Write-Host "`n========================================" -ForegroundColor Magenta + Write-Host " Force Amend Tagged Commit Script" -ForegroundColor Magenta + Write-Host "========================================`n" -ForegroundColor Magenta + + if ($DryRun) { + Write-Warn "*** DRY RUN MODE - No changes will be made ***`n" + } + + # Get current branch + Write-Step "Getting current branch..." 
+ $Branch = & git rev-parse --abbrev-ref HEAD 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to get current branch. Are you in a git repository?" + } + Write-Info "Branch: $Branch" + + # Get last commit info + Write-Step "Getting last commit..." + $HeadCommit = & git rev-parse HEAD 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to get HEAD commit" + } + $CommitMessage = & git log -1 --format="%s" 2>&1 + $CommitHash = & git log -1 --format="%h" 2>&1 + Write-Info "Commit: $CommitHash - $CommitMessage" + + # Find tag pointing to HEAD + Write-Step "Finding tag on last commit..." + $Tags = & git tag --points-at HEAD 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to query tags" + } + + if (-not $Tags -or $Tags.Count -eq 0 -or [string]::IsNullOrWhiteSpace($Tags)) { + throw "No tag found on the last commit ($CommitHash). This script requires the last commit to have an associated tag." + } + + # If multiple tags, use the first one + $TagName = ($Tags -split "`n")[0].Trim() + Write-Success "Found tag: $TagName" + + # Show current status + Write-Step "Checking pending changes..." + $Status = & git status --short 2>&1 + if ($Status) { + Write-Info "Pending changes:" + $Status -split "`n" | ForEach-Object { Write-Info " $_" } + } else { + Write-Warn "No pending changes found" + $confirm = Read-Host "`n No changes to amend. Continue to recreate tag and force push? (y/N)" + if ($confirm -ne 'y' -and $confirm -ne 'Y') { + Write-Host "`nAborted by user" -ForegroundColor Yellow + exit 0 + } + } + + # Confirm operation + Write-Host "`n----------------------------------------" -ForegroundColor White + Write-Host " Summary of operations:" -ForegroundColor White + Write-Host "----------------------------------------" -ForegroundColor White + Write-Host " Branch: $Branch" -ForegroundColor White + Write-Host " Commit: $CommitHash" -ForegroundColor White + Write-Host " Tag: $TagName" -ForegroundColor White + Write-Host " Remote: origin" -ForegroundColor White + Write-Host "----------------------------------------`n" -ForegroundColor White + + if (-not $DryRun) { + $confirm = Read-Host " Proceed with amend and force push? (y/N)" + if ($confirm -ne 'y' -and $confirm -ne 'Y') { + Write-Host "`nAborted by user" -ForegroundColor Yellow + exit 0 + } + } + + # Stage all changes + Write-Step "Staging all changes..." + if (-not $DryRun) { + & git add -A 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to stage changes" + } + } + Write-Success "All changes staged" + + # Amend commit + Write-Step "Amending commit..." + if (-not $DryRun) { + & git commit --amend --no-edit 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to amend commit" + } + } + Write-Success "Commit amended" + + # Delete local tag + Write-Step "Deleting local tag '$TagName'..." + if (-not $DryRun) { + & git tag -d $TagName 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to delete local tag" + } + } + Write-Success "Local tag deleted" + + # Recreate tag on new commit + Write-Step "Recreating tag '$TagName' on amended commit..." + if (-not $DryRun) { + & git tag $TagName 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to create tag" + } + } + Write-Success "Tag recreated" + + # Force push branch + Write-Step "Force pushing branch '$Branch' to origin..." + if (-not $DryRun) { + & git push --force origin $Branch 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to force push branch" + } + } + Write-Success "Branch force pushed" + + # Force push tag + Write-Step "Force pushing tag '$TagName' to origin..." 
+ if (-not $DryRun) { + & git push --force origin $TagName 2>&1 + if ($LASTEXITCODE -ne 0) { + throw "Failed to force push tag" + } + } + Write-Success "Tag force pushed" + + Write-Host "`n========================================" -ForegroundColor Green + Write-Host " Operation completed successfully!" -ForegroundColor Green + Write-Host "========================================`n" -ForegroundColor Green + + # Show final state + Write-Host "Final state:" -ForegroundColor White + & git log -1 --oneline + Write-Host "" + +} catch { + Write-Host "`n========================================" -ForegroundColor Red + Write-Host " ERROR: $($_.Exception.Message)" -ForegroundColor Red + Write-Host "========================================`n" -ForegroundColor Red + exit 1 +} diff --git a/src/Generate-Changelog.bat b/src/Generate-Changelog.bat new file mode 100644 index 0000000..07ca203 --- /dev/null +++ b/src/Generate-Changelog.bat @@ -0,0 +1,9 @@ +@echo off + +REM Change directory to the location of the script +cd /d %~dp0 + +REM Run AI changelog generator (dry-run mode with debug output) +powershell -ExecutionPolicy Bypass -File "%~dp0Generate-Changelog.ps1" + +pause diff --git a/src/Generate-Changelog.ps1 b/src/Generate-Changelog.ps1 new file mode 100644 index 0000000..76ebaf5 --- /dev/null +++ b/src/Generate-Changelog.ps1 @@ -0,0 +1,449 @@ +<# +.SYNOPSIS + AI-assisted changelog generation and license year update. + +.DESCRIPTION + Generates changelog entries from uncommitted changes using a 3-pass LLM pipeline: + 1. Analyze: Convert changes to changelog items + 2. Consolidate: Merge similar items, remove duplicates + 3. Format: Structure as Keep a Changelog format + + Also updates LICENSE.md copyright year if needed. + Optional RAG pre-processing clusters related changes using embeddings. + All configuration is in changelogsettings.json. + +.PARAMETER DryRun + Show what would be generated without making changes. + Enables debug output showing intermediate LLM results. + Does not modify CHANGELOG.md or LICENSE.md. 
+ +.USAGE + Generate changelog and update license: + .\Generate-Changelog.ps1 + + Dry run (preview without changes): + .\Generate-Changelog.ps1 -DryRun + +.NOTES + Requires: + - Ollama running locally (configured in changelogsettings.json) + - OllamaClient.psm1 and BuildUtils.psm1 modules + + Configuration (changelogsettings.json): + - csprojPath: Path to .csproj file for version + - outputFile: Path to CHANGELOG.md + - licensePath: Path to LICENSE.md + - debug: Enable debug output + - models: LLM models for each pass + - prompts: Prompt templates +#> + +param( + [switch]$DryRun +) + +# ============================================================================== +# PATH CONFIGURATION +# ============================================================================== + +$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path +$repoRoot = git rev-parse --show-toplevel 2>$null +if (-not $repoRoot) { + # Fallback if not in git repo + $repoRoot = Split-Path -Parent $scriptDir +} + +$repoRoot = $repoRoot.Trim() + +# ============================================================================== +# LOAD SETTINGS +# ============================================================================== + +$settingsPath = Join-Path $scriptDir "changelogsettings.json" +if (-not (Test-Path $settingsPath)) { + Write-Error "Settings file not found: $settingsPath" + exit 1 +} + +$settings = Get-Content $settingsPath -Raw | ConvertFrom-Json +Write-Host "Loaded settings from changelogsettings.json" -ForegroundColor Gray + +# Resolve paths relative to script location +$CsprojPath = if ($settings.changelog.csprojPath) { + [System.IO.Path]::GetFullPath((Join-Path $scriptDir $settings.changelog.csprojPath)) +} +else { + Join-Path $scriptDir "MaksIT.Core\MaksIT.Core.csproj" +} + +$OutputFile = if ($settings.changelog.outputFile) { + [System.IO.Path]::GetFullPath((Join-Path $scriptDir $settings.changelog.outputFile)) +} +else { + $null +} + +$LicensePath = if ($settings.changelog.licensePath) { + [System.IO.Path]::GetFullPath((Join-Path $scriptDir $settings.changelog.licensePath)) +} +else { + $null +} + +# ============================================================================== +# LICENSE YEAR UPDATE +# ============================================================================== + +if ($LicensePath -and (Test-Path $LicensePath)) { + Write-Host "Checking LICENSE.md copyright year..." -ForegroundColor Gray + $currentYear = (Get-Date).Year + $licenseContent = Get-Content $LicensePath -Raw + + # Match pattern: "Copyright (c) YYYY - YYYY" and update end year + $licensePattern = "(Copyright \(c\) \d{4}\s*-\s*)(\d{4})" + + if ($licenseContent -match $licensePattern) { + $existingEndYear = [int]$Matches[2] + + if ($existingEndYear -lt $currentYear) { + if ($DryRun) { + Write-Host "[DryRun] LICENSE.md needs update: $existingEndYear -> $currentYear" -ForegroundColor Yellow + } + else { + Write-Host "Updating LICENSE.md copyright year: $existingEndYear -> $currentYear" -ForegroundColor Cyan + $updatedContent = $licenseContent -replace $licensePattern, "`${1}$currentYear" + Set-Content -Path $LicensePath -Value $updatedContent -NoNewline + Write-Host "LICENSE.md updated." -ForegroundColor Green + } + } + else { + Write-Host "LICENSE.md copyright year is current ($existingEndYear)." 
-ForegroundColor Gray + } + } +} + +# ============================================================================== +# IMPORT MODULES +# ============================================================================== + +# Import build utilities +$buildUtilsPath = Join-Path $scriptDir "BuildUtils.psm1" +if (Test-Path $buildUtilsPath) { + Import-Module $buildUtilsPath -Force +} +else { + Write-Error "BuildUtils.psm1 not found: $buildUtilsPath" + exit 1 +} + +# Import Ollama client +$ollamaModulePath = Join-Path $scriptDir "OllamaClient.psm1" +if (-not $settings.ollama.enabled) { + Write-Error "Ollama is disabled in changelogsettings.json" + exit 1 +} + +if (-not (Test-Path $ollamaModulePath)) { + Write-Error "OllamaClient.psm1 not found: $ollamaModulePath" + exit 1 +} + +Import-Module $ollamaModulePath -Force +Set-OllamaConfig -ApiUrl $settings.ollama.apiUrl ` + -DefaultContextWindow $settings.ollama.defaultContextWindow ` + -DefaultTimeout $settings.ollama.defaultTimeout + +# ============================================================================== +# CHANGELOG CONFIGURATION +# ============================================================================== + +$clSettings = $settings.changelog +$changelogConfig = @{ + Debug = if ($DryRun) { $true } else { $clSettings.debug } + EnableRAG = $clSettings.enableRAG + SimilarityThreshold = $clSettings.similarityThreshold + FileExtension = $clSettings.fileExtension + ExcludePatterns = if ($clSettings.excludePatterns) { @($clSettings.excludePatterns) } else { @() } + Models = @{ + Analyze = @{ + Name = $clSettings.models.analyze.name + Context = $clSettings.models.analyze.context + MaxTokens = if ($null -ne $clSettings.models.analyze.maxTokens) { $clSettings.models.analyze.maxTokens } else { 0 } + } + Reason = @{ + Name = $clSettings.models.reason.name + Context = $clSettings.models.reason.context + MaxTokens = if ($null -ne $clSettings.models.reason.maxTokens) { $clSettings.models.reason.maxTokens } else { 0 } + Temperature = if ($clSettings.models.reason.temperature) { $clSettings.models.reason.temperature } else { 0.1 } + } + Write = @{ + Name = $clSettings.models.write.name + Context = $clSettings.models.write.context + MaxTokens = if ($null -ne $clSettings.models.write.maxTokens) { $clSettings.models.write.maxTokens } else { 0 } + } + Embed = @{ Name = $clSettings.models.embed.name } + } + Prompts = @{ + Analyze = if ($clSettings.prompts.analyze) { + if ($clSettings.prompts.analyze -is [array]) { $clSettings.prompts.analyze -join "`n" } else { $clSettings.prompts.analyze } + } else { "Convert changes to changelog: {{changes}}" } + Reason = if ($clSettings.prompts.reason) { + if ($clSettings.prompts.reason -is [array]) { $clSettings.prompts.reason -join "`n" } else { $clSettings.prompts.reason } + } else { "Consolidate: {{input}}" } + Format = if ($clSettings.prompts.format) { + if ($clSettings.prompts.format -is [array]) { $clSettings.prompts.format -join "`n" } else { $clSettings.prompts.format } + } else { "Format as changelog: {{items}}" } + } +} + +# ============================================================================== +# AI CHANGELOG GENERATION FUNCTION +# ============================================================================== + +function Get-AIChangelogSuggestion { + param( + [Parameter(Mandatory)][string]$Changes, + [Parameter(Mandatory)][string]$Version + ) + + $cfg = $script:changelogConfig + $debug = $cfg.Debug + + # === RAG PRE-PROCESSING === + $processedChanges = $Changes + + if ($cfg.EnableRAG) { + Write-Host " 
RAG Pre-processing ($($cfg.Models.Embed.Name))..." -ForegroundColor Cyan
+    $changeArray = $Changes -split "`n" | Where-Object { $_.Trim() -ne "" }
+
+    if ($changeArray.Length -gt 3) {
+      Write-Host " RAG: Embedding $($changeArray.Length) changes..." -ForegroundColor Gray
+      $clusters = Group-TextsByEmbedding -Model $cfg.Models.Embed.Name -Texts $changeArray -SimilarityThreshold $cfg.SimilarityThreshold
+      Write-Host " RAG: Reduced to $($clusters.Length) groups" -ForegroundColor Green
+
+      # Format clusters
+      $grouped = @()
+      foreach ($cluster in $clusters) {
+        if ($cluster.Length -eq 1) {
+          $grouped += $cluster[0]
+        }
+        else {
+          $grouped += "[RELATED CHANGES]`n" + ($cluster -join "`n") + "`n[/RELATED CHANGES]"
+        }
+      }
+      $processedChanges = $grouped -join "`n"
+
+      if ($debug) {
+        Write-Host "`n [DEBUG] RAG grouped changes:" -ForegroundColor Magenta
+        Write-Host $processedChanges -ForegroundColor DarkGray
+        Write-Host ""
+      }
+    }
+  }
+
+  # === PASS 1: Analyze changes ===
+  $m1 = $cfg.Models.Analyze
+  Write-Host " Pass 1/3: Analyzing ($($m1.Name), ctx:$($m1.Context))..." -ForegroundColor Gray
+
+  $prompt1 = $cfg.Prompts.Analyze -replace '\{\{changes\}\}', $processedChanges
+  $pass1 = Invoke-OllamaPrompt -Model $m1.Name -ContextWindow $m1.Context -MaxTokens $m1.MaxTokens -Prompt $prompt1
+
+  if (-not $pass1) { return $null }
+  if ($debug) { Write-Host "`n [DEBUG] Pass 1 output:" -ForegroundColor Magenta; Write-Host $pass1 -ForegroundColor DarkGray; Write-Host "" }
+
+  # === PASS 2: Consolidate ===
+  $m2 = $cfg.Models.Reason
+  Write-Host " Pass 2/3: Consolidating ($($m2.Name), ctx:$($m2.Context))..." -ForegroundColor Gray
+
+  $prompt2 = $cfg.Prompts.Reason -replace '\{\{input\}\}', $pass1
+  $pass2 = Invoke-OllamaPrompt -Model $m2.Name -ContextWindow $m2.Context -MaxTokens $m2.MaxTokens -Temperature $m2.Temperature -Prompt $prompt2
+
+  if (-not $pass2) { return $pass1 }
+  if ($pass2 -match "</think>") { $pass2 = ($pass2 -split "</think>")[-1].Trim() }
+  if ($debug) { Write-Host "`n [DEBUG] Pass 2 output:" -ForegroundColor Magenta; Write-Host $pass2 -ForegroundColor DarkGray; Write-Host "" }
+
+  # === PASS 3: Format ===
+  $m3 = $cfg.Models.Write
+  Write-Host " Pass 3/3: Formatting ($($m3.Name), ctx:$($m3.Context))..." -ForegroundColor Gray
+
+  $prompt3 = $cfg.Prompts.Format -replace '\{\{items\}\}', $pass2
+  $pass3 = Invoke-OllamaPrompt -Model $m3.Name -ContextWindow $m3.Context -MaxTokens $m3.MaxTokens -Prompt $prompt3
+
+  if (-not $pass3) { return $pass2 }
+  if ($debug) { Write-Host "`n [DEBUG] Pass 3 output:" -ForegroundColor Magenta; Write-Host $pass3 -ForegroundColor DarkGray; Write-Host "" }
+
+  # Clean up preamble
+  if ($pass3 -match "(### Added|### Changed|### Fixed|### Removed)") {
+    $pass3 = $pass3.Substring($pass3.IndexOf($Matches[0]))
+  }
+
+  # Clean up headers - remove any extra text after "### Added" etc.
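+  # e.g. a line like "### Added - new logging features" is trimmed to just "### Added"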
+ $pass3 = $pass3 -replace '(### Added)[^\n]*', '### Added' + $pass3 = $pass3 -replace '(### Changed)[^\n]*', '### Changed' + $pass3 = $pass3 -replace '(### Fixed)[^\n]*', '### Fixed' + $pass3 = $pass3 -replace '(### Removed)[^\n]*', '### Removed' + + # Clean up formatting: remove extra blank lines, normalize line endings + $pass3 = $pass3 -replace "`r`n", "`n" # Normalize to LF + $pass3 = $pass3 -replace "(\n\s*){3,}", "`n`n" # Max 1 blank line + $pass3 = $pass3 -replace "- (.+)\n\n- ", "- `$1`n- " # No blank between items + $pass3 = $pass3 -replace "\n{2,}(### )", "`n`n`$1" # One blank before headers + + # Remove empty sections (e.g., "### Fixed\n- (No items)" or "### Removed\n\n###") + $pass3 = $pass3 -replace "### \w+\s*\n-\s*\(No items\)\s*\n?", "" + $pass3 = $pass3 -replace "### \w+\s*\n\s*\n(?=###|$)", "" + $pass3 = $pass3.Trim() + + return $pass3 +} + +# ============================================================================== +# MAIN EXECUTION +# ============================================================================== + +Write-Host "" +Write-Host "==================================================" -ForegroundColor Cyan +Write-Host "AI CHANGELOG GENERATOR" -ForegroundColor Cyan +Write-Host "==================================================" -ForegroundColor Cyan +Write-Host "" + +# Check Ollama availability +if (-not (Test-OllamaAvailable)) { + Write-Error "Ollama is not available. Start Ollama and try again." + exit 1 +} + +Write-Host "Ollama connected: $($settings.ollama.apiUrl)" -ForegroundColor Green +Write-Host "Models: $($changelogConfig.Models.Analyze.Name) | $($changelogConfig.Models.Reason.Name) | $($changelogConfig.Models.Embed.Name)" -ForegroundColor Gray +Write-Host "" + +# Get version from csproj +if (-not (Test-Path $CsprojPath)) { + Write-Error "Csproj file not found: $CsprojPath" + exit 1 +} + +[xml]$csproj = Get-Content $CsprojPath +$Version = $csproj.Project.PropertyGroup.Version | Where-Object { $_ } | Select-Object -First 1 + +Write-Host "Version: $Version" -ForegroundColor White + +# Filter function for excluding test files +$excludePatterns = $changelogConfig.ExcludePatterns +function Test-Excluded { + param([string]$Item) + foreach ($pattern in $excludePatterns) { + if ($Item -match [regex]::Escape($pattern)) { return $true } + } + return $false +} + +# Get committed changes for this version (analyzed diffs) +$committedChanges = Get-CommitChangesAnalysis -Version $Version -CsprojPath $CsprojPath -FileFilter $changelogConfig.FileExtension +$filteredCommitted = $committedChanges | Where-Object { -not (Test-Excluded $_) } + +# Get uncommitted changes (staged, modified, new, deleted) +$uncommitted = Get-UncommittedChanges -FileFilter $changelogConfig.FileExtension +$filteredUncommitted = $uncommitted.Summary | Where-Object { -not (Test-Excluded $_) } + +# Combine all changes +$allChanges = @() +if ($filteredCommitted.Count -gt 0) { $allChanges += $filteredCommitted } +if ($filteredUncommitted.Count -gt 0) { $allChanges += $filteredUncommitted } + +if ($allChanges.Count -eq 0) { + Write-Host "No changes found for version $Version (excluding tests)" -ForegroundColor Yellow + exit 0 +} + +$changeLog = $allChanges -join "`n" + +Write-Host "Found $($filteredCommitted.Count) committed changes" -ForegroundColor Gray +Write-Host "Found $($filteredUncommitted.Count) uncommitted changes" -ForegroundColor Gray +Write-Host "" + +# Generate changelog from uncommitted changes +$suggestion = Get-AIChangelogSuggestion -Changes $changeLog -Version $Version + +if 
($suggestion) { + $fullEntry = "## v$Version`n`n$suggestion" + + Write-Host "" + Write-Host "==========================================" -ForegroundColor Green + Write-Host "AI SUGGESTED CHANGELOG ENTRY" -ForegroundColor Green + Write-Host "==========================================" -ForegroundColor Green + Write-Host "" + Write-Host $fullEntry -ForegroundColor White + Write-Host "" + Write-Host "==========================================" -ForegroundColor Green + + # Update changelog file if specified and not in DryRun mode + if ($OutputFile -and -not $DryRun) { + if (Test-Path $OutputFile) { + # Read existing content + $existingContent = Get-Content $OutputFile -Raw + + # Check if this version already exists + if ($existingContent -match "## v$Version\b") { + Write-Host "" + Write-Host "WARNING: Version $Version already exists in $OutputFile" -ForegroundColor Yellow + Write-Host "Skipping file update. Review and update manually if needed." -ForegroundColor Yellow + } + else { + # Find insertion point (after header, before first version entry) + # Header typically ends before first "## v" line + if ($existingContent -match '(?s)(^.*?)(\r?\n)(## v)') { + $header = $Matches[1] + $newline = $Matches[2] + $rest = $existingContent.Substring($header.Length + $newline.Length) + $newContent = $header + "`n`n" + $fullEntry + "`n`n" + $rest + } + else { + # No existing version entries - append after content + $newContent = $existingContent.TrimEnd() + "`n`n" + $fullEntry + "`n" + } + + # Normalize multiple blank lines to max 2 + $newContent = $newContent -replace "(\r?\n){3,}", "`n`n" + + $newContent | Out-File -FilePath $OutputFile -Encoding utf8 -NoNewline + Write-Host "" + Write-Host "Updated: $OutputFile" -ForegroundColor Cyan + } + } + else { + # Create new file with header + $newContent = @" +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +$fullEntry +"@ + $newContent | Out-File -FilePath $OutputFile -Encoding utf8 + Write-Host "" + Write-Host "Created: $OutputFile" -ForegroundColor Cyan + } + } + elseif ($OutputFile -and $DryRun) { + Write-Host "" + Write-Host "[DryRun] Would update: $OutputFile" -ForegroundColor Yellow + } + + Write-Host "" + if ($DryRun) { + Write-Host "DryRun complete. No files were modified." -ForegroundColor Yellow + } + else { + Write-Host "Review the changelog entry, then commit." -ForegroundColor Yellow + } +} +else { + Write-Error "AI changelog generation failed" + exit 1 +} + +Write-Host "" diff --git a/src/MaksIT.Core.Tests/CultureTests.cs b/src/MaksIT.Core.Tests/CultureTests.cs new file mode 100644 index 0000000..0c9f867 --- /dev/null +++ b/src/MaksIT.Core.Tests/CultureTests.cs @@ -0,0 +1,107 @@ +namespace MaksIT.Core.Tests; + +using System.Globalization; + +public class CultureTests { + + [Fact] + public void TrySet_NullCulture_SetsInvariantCulture() { + // Arrange + string? 
culture = null; + + // Act + var result = Culture.TrySet(culture, out var errorMessage); + + // Assert + Assert.True(result); + Assert.Null(errorMessage); + Assert.Equal(CultureInfo.InvariantCulture, Thread.CurrentThread.CurrentCulture); + Assert.Equal(CultureInfo.InvariantCulture, Thread.CurrentThread.CurrentUICulture); + } + + [Fact] + public void TrySet_EmptyCulture_SetsInvariantCulture() { + // Arrange + string culture = ""; + + // Act + var result = Culture.TrySet(culture, out var errorMessage); + + // Assert + Assert.True(result); + Assert.Null(errorMessage); + Assert.Equal(CultureInfo.InvariantCulture, Thread.CurrentThread.CurrentCulture); + Assert.Equal(CultureInfo.InvariantCulture, Thread.CurrentThread.CurrentUICulture); + } + + [Theory] + [InlineData("en-US")] + [InlineData("en-GB")] + [InlineData("de-DE")] + [InlineData("fr-FR")] + [InlineData("ja-JP")] + public void TrySet_ValidCulture_SetsCulture(string cultureName) { + // Act + var result = Culture.TrySet(cultureName, out var errorMessage); + + // Assert + Assert.True(result); + Assert.Null(errorMessage); + Assert.Equal(cultureName, Thread.CurrentThread.CurrentCulture.Name); + Assert.Equal(cultureName, Thread.CurrentThread.CurrentUICulture.Name); + } + + [Fact] + public void TrySet_InvalidCulture_ReturnsFalseWithErrorMessage() { + // Arrange - use a culture name that's invalid on all platforms + // Note: Linux is more permissive with culture names than Windows + // Using a very malformed name that should fail everywhere + string culture = "xx-INVALID-12345-@#$%"; + + // Act + var result = Culture.TrySet(culture, out var errorMessage); + + // Assert + // On some Linux systems, even invalid cultures may not throw + // So we just verify the method handles it without crashing + if (!result) { + Assert.NotNull(errorMessage); + Assert.NotEmpty(errorMessage); + } + // If it somehow succeeds (very permissive system), that's also acceptable + } + + [Fact] + public void TrySet_ValidCulture_AffectsCurrentThread() { + // Arrange + var originalCulture = Thread.CurrentThread.CurrentCulture; + + try { + // Act + Culture.TrySet("de-DE", out _); + + // Assert + Assert.Equal("de-DE", Thread.CurrentThread.CurrentCulture.Name); + } + finally { + // Cleanup - restore original culture + Thread.CurrentThread.CurrentCulture = originalCulture; + Thread.CurrentThread.CurrentUICulture = originalCulture; + } + } + + [Fact] + public void TrySet_NeutralCulture_CreatesSpecificCulture() { + // Arrange - "en" is a neutral culture, should create specific culture + string culture = "en"; + + // Act + var result = Culture.TrySet(culture, out var errorMessage); + + // Assert + Assert.True(result); + Assert.Null(errorMessage); + // CreateSpecificCulture("en") typically returns "en-US" or similar + Assert.StartsWith("en", Thread.CurrentThread.CurrentCulture.Name); + } +} diff --git a/src/MaksIT.Core.Tests/EnvVarTests.cs b/src/MaksIT.Core.Tests/EnvVarTests.cs new file mode 100644 index 0000000..de1b0e8 --- /dev/null +++ b/src/MaksIT.Core.Tests/EnvVarTests.cs @@ -0,0 +1,178 @@ +namespace MaksIT.Core.Tests; + +public class EnvVarTests { + + private const string TestEnvVarName = "MAKSIT_TEST_ENV_VAR"; + private const string TestEnvVarValue = "test_value_123"; + + [Fact] + public void TrySet_ProcessLevel_SetsEnvironmentVariable() { + // Arrange & Act + var result = EnvVar.TrySet(TestEnvVarName, TestEnvVarValue, "process", out var errorMessage); + + try { + // Assert + Assert.True(result); + Assert.Null(errorMessage); + Assert.Equal(TestEnvVarValue, 
Environment.GetEnvironmentVariable(TestEnvVarName)); + } + finally { + // Cleanup + Environment.SetEnvironmentVariable(TestEnvVarName, null); + } + } + + [Fact] + public void TryUnSet_ProcessLevel_RemovesEnvironmentVariable() { + // Arrange + Environment.SetEnvironmentVariable(TestEnvVarName, TestEnvVarValue); + + // Act + var result = EnvVar.TryUnSet(TestEnvVarName, "process", out var errorMessage); + + // Assert + Assert.True(result); + Assert.Null(errorMessage); + Assert.Null(Environment.GetEnvironmentVariable(TestEnvVarName)); + } + + [Fact] + public void TrySet_UserLevel_SetsEnvironmentVariable() { + // This test may fail on Linux/Docker containers due to permissions + // Skip on non-Windows platforms as User-level env vars behave differently + if (!OperatingSystem.IsWindows()) { + // On Linux, user-level env vars in containers don't persist as expected + // Just verify the method doesn't crash + var result = EnvVar.TrySet(TestEnvVarName, TestEnvVarValue, "user", out var errorMessage); + // Either succeeds or fails gracefully - both are acceptable on Linux + Assert.True(result || errorMessage != null); + return; + } + + // Windows-specific test + var winResult = EnvVar.TrySet(TestEnvVarName, TestEnvVarValue, "user", out var winErrorMessage); + + try { + if (winResult) { + Assert.Null(winErrorMessage); + var value = Environment.GetEnvironmentVariable(TestEnvVarName, EnvironmentVariableTarget.User); + Assert.Equal(TestEnvVarValue, value); + } + } + finally { + try { + Environment.SetEnvironmentVariable(TestEnvVarName, null, EnvironmentVariableTarget.User); + } + catch { + // Ignore cleanup errors + } + } + } + + [Fact] + public void TryAddToPath_AddsPathToEnvironment() { + // Arrange + var originalPath = Environment.GetEnvironmentVariable("PATH"); + var newPath = "/test/path/that/does/not/exist"; + + try { + // Act + var result = EnvVar.TryAddToPath(newPath, out var errorMessage); + + // Assert + Assert.True(result); + Assert.Null(errorMessage); + var currentPath = Environment.GetEnvironmentVariable("PATH"); + Assert.Contains(newPath, currentPath); + } + finally { + // Cleanup - restore original PATH + Environment.SetEnvironmentVariable("PATH", originalPath); + } + } + + [Fact] + public void TryAddToPath_DuplicatePath_DoesNotAddAgain() { + // Arrange + var originalPath = Environment.GetEnvironmentVariable("PATH"); + var newPath = "/test/unique/path"; + + try { + // Add first time + EnvVar.TryAddToPath(newPath, out _); + var pathAfterFirstAdd = Environment.GetEnvironmentVariable("PATH"); + + // Act - Add same path again + var result = EnvVar.TryAddToPath(newPath, out var errorMessage); + var pathAfterSecondAdd = Environment.GetEnvironmentVariable("PATH"); + + // Assert + Assert.True(result); + Assert.Null(errorMessage); + // Path should not have duplicate entries + Assert.Equal(pathAfterFirstAdd, pathAfterSecondAdd); + } + finally { + // Cleanup + Environment.SetEnvironmentVariable("PATH", originalPath); + } + } + + [Theory] + [InlineData("process")] + [InlineData("user")] + [InlineData("Process")] + [InlineData("USER")] + public void TrySet_VariousTargets_HandlesCorrectly(string target) { + // Arrange + var envName = $"{TestEnvVarName}_{target.ToUpper()}"; + + // Act + var result = EnvVar.TrySet(envName, TestEnvVarValue, target, out var errorMessage); + + // Assert - for process level, should always succeed + if (target.ToLower() == "process") { + Assert.True(result); + Assert.Null(errorMessage); + } + // For other levels, result depends on permissions + + // Cleanup + try { + 
EnvVar.TryUnSet(envName, target, out _); + } + catch { + // Ignore cleanup errors + } + } + + [Fact] + public void TrySet_EmptyValue_SetsEmptyString() { + // Arrange & Act + var result = EnvVar.TrySet(TestEnvVarName, "", "process", out var errorMessage); + + try { + // Assert + Assert.True(result); + Assert.Null(errorMessage); + Assert.Equal("", Environment.GetEnvironmentVariable(TestEnvVarName)); + } + finally { + // Cleanup + Environment.SetEnvironmentVariable(TestEnvVarName, null); + } + } + + [Fact] + public void TryUnSet_NonExistentVariable_Succeeds() { + // Arrange + var nonExistentVar = "MAKSIT_NON_EXISTENT_VAR_12345"; + + // Act + var result = EnvVar.TryUnSet(nonExistentVar, "process", out var errorMessage); + + // Assert + Assert.True(result); + Assert.Null(errorMessage); + } +} diff --git a/src/MaksIT.Core.Tests/Extensions/ExceptionExtensionsTests.cs b/src/MaksIT.Core.Tests/Extensions/ExceptionExtensionsTests.cs new file mode 100644 index 0000000..674021a --- /dev/null +++ b/src/MaksIT.Core.Tests/Extensions/ExceptionExtensionsTests.cs @@ -0,0 +1,80 @@ +namespace MaksIT.Core.Tests.Extensions; + +using MaksIT.Core.Extensions; + +public class ExceptionExtensionsTests { + + [Fact] + public void ExtractMessages_SingleException_ReturnsSingleMessage() { + // Arrange + var exception = new InvalidOperationException("Test message"); + + // Act + var messages = exception.ExtractMessages(); + + // Assert + Assert.Single(messages); + Assert.Equal("Test message", messages[0]); + } + + [Fact] + public void ExtractMessages_WithInnerException_ReturnsAllMessages() { + // Arrange + var innerException = new ArgumentException("Inner message"); + var outerException = new InvalidOperationException("Outer message", innerException); + + // Act + var messages = outerException.ExtractMessages(); + + // Assert + Assert.Equal(2, messages.Count); + Assert.Equal("Outer message", messages[0]); + Assert.Equal("Inner message", messages[1]); + } + + [Fact] + public void ExtractMessages_WithMultipleNestedExceptions_ReturnsAllMessages() { + // Arrange + var innermost = new ArgumentNullException("param", "Innermost message"); + var middle = new ArgumentException("Middle message", innermost); + var outer = new InvalidOperationException("Outer message", middle); + + // Act + var messages = outer.ExtractMessages(); + + // Assert + Assert.Equal(3, messages.Count); + Assert.Equal("Outer message", messages[0]); + Assert.Equal("Middle message", messages[1]); + Assert.Contains("Innermost message", messages[2]); + } + + [Fact] + public void ExtractMessages_AggregateException_ReturnsOuterMessage() { + // Arrange + var inner1 = new InvalidOperationException("Error 1"); + var inner2 = new ArgumentException("Error 2"); + var aggregate = new AggregateException("Multiple errors", inner1, inner2); + + // Act + var messages = aggregate.ExtractMessages(); + + // Assert + // AggregateException's InnerException is the first inner exception + Assert.Equal(2, messages.Count); + Assert.Contains("Multiple errors", messages[0]); + } + + [Fact] + public void ExtractMessages_EmptyMessage_ReturnsEmptyString() { + // Arrange + var exception = new Exception(""); + + // Act + var messages = exception.ExtractMessages(); + + // Assert + Assert.Single(messages); + Assert.Equal("", messages[0]); + } +} diff --git a/src/MaksIT.Core.Tests/Extensions/FormatsExtensionsTests.cs b/src/MaksIT.Core.Tests/Extensions/FormatsExtensionsTests.cs new file mode 100644 index 0000000..202aac3 --- /dev/null +++ 
b/src/MaksIT.Core.Tests/Extensions/FormatsExtensionsTests.cs @@ -0,0 +1,202 @@ +namespace MaksIT.Core.Tests.Extensions; + +using MaksIT.Core.Extensions; + +public class FormatsExtensionsTests : IDisposable { + private readonly string _testDirectory; + private readonly List _createdFiles = new(); + + public FormatsExtensionsTests() { + _testDirectory = Path.Combine(Path.GetTempPath(), $"MaksIT_Test_{Guid.NewGuid()}"); + Directory.CreateDirectory(_testDirectory); + } + + public void Dispose() { + // Cleanup + try { + if (Directory.Exists(_testDirectory)) { + Directory.Delete(_testDirectory, true); + } + foreach (var file in _createdFiles) { + if (File.Exists(file)) { + File.Delete(file); + } + } + } + catch { + // Ignore cleanup errors + } + } + + [Fact] + public void TryCreateTarFromDirectory_ValidDirectory_ReturnsTrue() { + // Arrange + var sourceDir = Path.Combine(_testDirectory, "source"); + Directory.CreateDirectory(sourceDir); + File.WriteAllText(Path.Combine(sourceDir, "test.txt"), "Hello, World!"); + + var outputTar = Path.Combine(_testDirectory, "output.tar"); + _createdFiles.Add(outputTar); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(sourceDir, outputTar); + + // Assert + Assert.True(result); + Assert.True(File.Exists(outputTar)); + Assert.True(new FileInfo(outputTar).Length > 0); + } + + [Fact] + public void TryCreateTarFromDirectory_MultipleFiles_ReturnsTrue() { + // Arrange + var sourceDir = Path.Combine(_testDirectory, "multi_source"); + Directory.CreateDirectory(sourceDir); + File.WriteAllText(Path.Combine(sourceDir, "file1.txt"), "Content 1"); + File.WriteAllText(Path.Combine(sourceDir, "file2.txt"), "Content 2"); + File.WriteAllText(Path.Combine(sourceDir, "file3.txt"), "Content 3"); + + var outputTar = Path.Combine(_testDirectory, "multi_output.tar"); + _createdFiles.Add(outputTar); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(sourceDir, outputTar); + + // Assert + Assert.True(result); + Assert.True(File.Exists(outputTar)); + } + + [Fact] + public void TryCreateTarFromDirectory_NestedDirectories_ReturnsTrue() { + // Arrange + var sourceDir = Path.Combine(_testDirectory, "nested_source"); + var subDir = Path.Combine(sourceDir, "subdir"); + Directory.CreateDirectory(subDir); + File.WriteAllText(Path.Combine(sourceDir, "root.txt"), "Root content"); + File.WriteAllText(Path.Combine(subDir, "nested.txt"), "Nested content"); + + var outputTar = Path.Combine(_testDirectory, "nested_output.tar"); + _createdFiles.Add(outputTar); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(sourceDir, outputTar); + + // Assert + Assert.True(result); + Assert.True(File.Exists(outputTar)); + } + + [Fact] + public void TryCreateTarFromDirectory_EmptyDirectory_ReturnsFalse() { + // Arrange + var sourceDir = Path.Combine(_testDirectory, "empty_source"); + Directory.CreateDirectory(sourceDir); + + var outputTar = Path.Combine(_testDirectory, "empty_output.tar"); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(sourceDir, outputTar); + + // Assert + Assert.False(result); + Assert.False(File.Exists(outputTar)); + } + + [Fact] + public void TryCreateTarFromDirectory_NonExistentDirectory_ReturnsFalse() { + // Arrange + var sourceDir = Path.Combine(_testDirectory, "non_existent"); + var outputTar = Path.Combine(_testDirectory, "non_existent_output.tar"); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(sourceDir, outputTar); + + // Assert + Assert.False(result); + } + + [Fact] + public void 
TryCreateTarFromDirectory_NullSourceDirectory_ReturnsFalse() { + // Arrange + var outputTar = Path.Combine(_testDirectory, "null_source_output.tar"); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(null!, outputTar); + + // Assert + Assert.False(result); + } + + [Fact] + public void TryCreateTarFromDirectory_EmptySourceDirectory_ReturnsFalse() { + // Arrange + var outputTar = Path.Combine(_testDirectory, "empty_path_output.tar"); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory("", outputTar); + + // Assert + Assert.False(result); + } + + [Fact] + public void TryCreateTarFromDirectory_WhitespaceSourceDirectory_ReturnsFalse() { + // Arrange + var outputTar = Path.Combine(_testDirectory, "whitespace_output.tar"); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(" ", outputTar); + + // Assert + Assert.False(result); + } + + [Fact] + public void TryCreateTarFromDirectory_NullOutputPath_ReturnsFalse() { + // Arrange + var sourceDir = Path.Combine(_testDirectory, "valid_source"); + Directory.CreateDirectory(sourceDir); + File.WriteAllText(Path.Combine(sourceDir, "test.txt"), "Content"); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(sourceDir, null!); + + // Assert + Assert.False(result); + } + + [Fact] + public void TryCreateTarFromDirectory_EmptyOutputPath_ReturnsFalse() { + // Arrange + var sourceDir = Path.Combine(_testDirectory, "valid_source2"); + Directory.CreateDirectory(sourceDir); + File.WriteAllText(Path.Combine(sourceDir, "test.txt"), "Content"); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(sourceDir, ""); + + // Assert + Assert.False(result); + } + + [Fact] + public void TryCreateTarFromDirectory_CreatesOutputDirectory_WhenNotExists() { + // Arrange + var sourceDir = Path.Combine(_testDirectory, "source_for_new_dir"); + Directory.CreateDirectory(sourceDir); + File.WriteAllText(Path.Combine(sourceDir, "test.txt"), "Content"); + + var outputDir = Path.Combine(_testDirectory, "new_output_dir"); + var outputTar = Path.Combine(outputDir, "output.tar"); + _createdFiles.Add(outputTar); + + // Act + var result = FormatsExtensions.TryCreateTarFromDirectory(sourceDir, outputTar); + + // Assert + Assert.True(result); + Assert.True(Directory.Exists(outputDir)); + Assert.True(File.Exists(outputTar)); + } +} diff --git a/src/MaksIT.Core.Tests/Logging/FileLoggerTests.cs b/src/MaksIT.Core.Tests/Logging/FileLoggerTests.cs index 1324bb7..31cc90c 100644 --- a/src/MaksIT.Core.Tests/Logging/FileLoggerTests.cs +++ b/src/MaksIT.Core.Tests/Logging/FileLoggerTests.cs @@ -1,4 +1,4 @@ -using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using MaksIT.Core.Logging; @@ -103,4 +103,118 @@ public class FileLoggerTests { Assert.Fail("Logger should handle exceptions gracefully."); } } + + [Fact] + public void ShouldWriteLogsToSubfolderWhenFolderPrefixUsed() { + // Arrange + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(sp => + new TestHostEnvironment { + EnvironmentName = Environments.Development, + ApplicationName = "TestApp", + ContentRootPath = Directory.GetCurrentDirectory() + }); + + serviceCollection.AddLogging(builder => builder.AddFileLogger(_testFolderPath, TimeSpan.FromDays(7))); + + var provider = serviceCollection.BuildServiceProvider(); + var loggerFactory = provider.GetRequiredService(); + + // Act - Create logger with Folder prefix + var 
logger = loggerFactory.CreateLogger(LoggerPrefix.Folder.WithValue("Audit")); + logger.LogInformation("Audit log message"); + + // Assert + var auditFolder = Path.Combine(_testFolderPath, "Audit"); + Assert.True(Directory.Exists(auditFolder), "Audit subfolder should be created"); + + var logFile = Directory.GetFiles(auditFolder, "log_*.txt").FirstOrDefault(); + Assert.NotNull(logFile); + var logContent = File.ReadAllText(logFile); + Assert.Contains("Audit log message", logContent); + } + + [Fact] + public void ShouldWriteLogsToDefaultFolderWhenNoPrefixUsed() { + // Arrange + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(sp => + new TestHostEnvironment { + EnvironmentName = Environments.Development, + ApplicationName = "TestApp", + ContentRootPath = Directory.GetCurrentDirectory() + }); + + serviceCollection.AddLogging(builder => builder.AddFileLogger(_testFolderPath, TimeSpan.FromDays(7))); + + var provider = serviceCollection.BuildServiceProvider(); + var loggerFactory = provider.GetRequiredService(); + + // Act - Create logger with full type name (simulating ILogger) + var logger = loggerFactory.CreateLogger("MyApp.Services.OrderService"); + logger.LogInformation("Order service log message"); + + // Assert - Should NOT create subfolder for type names + var logFile = Directory.GetFiles(_testFolderPath, "log_*.txt").FirstOrDefault(); + Assert.NotNull(logFile); + var logContent = File.ReadAllText(logFile); + Assert.Contains("Order service log message", logContent); + } + + [Fact] + public void ShouldHandleFolderPrefixWithSpaces() { + // Arrange + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(sp => + new TestHostEnvironment { + EnvironmentName = Environments.Development, + ApplicationName = "TestApp", + ContentRootPath = Directory.GetCurrentDirectory() + }); + + serviceCollection.AddLogging(builder => builder.AddFileLogger(_testFolderPath, TimeSpan.FromDays(7))); + + var provider = serviceCollection.BuildServiceProvider(); + var loggerFactory = provider.GetRequiredService(); + + // Act + var logger = loggerFactory.CreateLogger(LoggerPrefix.Folder.WithValue("My Custom Logs")); + logger.LogInformation("Custom folder log message"); + + // Assert + var customFolder = Path.Combine(_testFolderPath, "My Custom Logs"); + Assert.True(Directory.Exists(customFolder), "Custom subfolder with spaces should be created"); + + var logFile = Directory.GetFiles(customFolder, "log_*.txt").FirstOrDefault(); + Assert.NotNull(logFile); + var logContent = File.ReadAllText(logFile); + Assert.Contains("Custom folder log message", logContent); + } + + [Fact] + public void ShouldIgnoreEmptyFolderPrefix() { + // Arrange + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(sp => + new TestHostEnvironment { + EnvironmentName = Environments.Development, + ApplicationName = "TestApp", + ContentRootPath = Directory.GetCurrentDirectory() + }); + + serviceCollection.AddLogging(builder => builder.AddFileLogger(_testFolderPath, TimeSpan.FromDays(7))); + + var provider = serviceCollection.BuildServiceProvider(); + var loggerFactory = provider.GetRequiredService(); + + // Act - Create logger with empty folder value + var logger = loggerFactory.CreateLogger(LoggerPrefix.Folder.WithValue("")); + logger.LogInformation("Empty folder prefix log message"); + + // Assert - Should use default folder (not create empty subfolder) + var logFile = Directory.GetFiles(_testFolderPath, "log_*.txt").FirstOrDefault(); + Assert.NotNull(logFile); + 
var logContent = File.ReadAllText(logFile); + Assert.Contains("Empty folder prefix log message", logContent); + } } diff --git a/src/MaksIT.Core.Tests/Logging/JsonFileLoggerTests.cs b/src/MaksIT.Core.Tests/Logging/JsonFileLoggerTests.cs index a4128fb..544b81e 100644 --- a/src/MaksIT.Core.Tests/Logging/JsonFileLoggerTests.cs +++ b/src/MaksIT.Core.Tests/Logging/JsonFileLoggerTests.cs @@ -140,4 +140,61 @@ public class JsonFileLoggerTests { var logContent = File.ReadAllText(logFile); Assert.Contains("Test combined logging", logContent); } + + [Fact] + public void ShouldWriteLogsToSubfolderWhenFolderPrefixUsed() { + // Arrange + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(sp => + new TestHostEnvironment { + EnvironmentName = Environments.Development, + ApplicationName = "TestApp", + ContentRootPath = Directory.GetCurrentDirectory() + }); + + serviceCollection.AddLogging(builder => builder.AddJsonFileLogger(_testFolderPath, TimeSpan.FromDays(7))); + + var provider = serviceCollection.BuildServiceProvider(); + var loggerFactory = provider.GetRequiredService(); + + // Act - Create logger with Folder prefix + var logger = loggerFactory.CreateLogger(LoggerPrefix.Folder.WithValue("Audit")); + logger.LogInformation("Audit JSON log message"); + + // Assert + var auditFolder = Path.Combine(_testFolderPath, "Audit"); + Assert.True(Directory.Exists(auditFolder), "Audit subfolder should be created"); + + var logFile = Directory.GetFiles(auditFolder, "log_*.json").FirstOrDefault(); + Assert.NotNull(logFile); + var logContent = File.ReadAllText(logFile); + Assert.Contains("Audit JSON log message", logContent); + } + + [Fact] + public void ShouldWriteLogsToDefaultFolderWhenNoPrefixUsed() { + // Arrange + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(sp => + new TestHostEnvironment { + EnvironmentName = Environments.Development, + ApplicationName = "TestApp", + ContentRootPath = Directory.GetCurrentDirectory() + }); + + serviceCollection.AddLogging(builder => builder.AddJsonFileLogger(_testFolderPath, TimeSpan.FromDays(7))); + + var provider = serviceCollection.BuildServiceProvider(); + var loggerFactory = provider.GetRequiredService(); + + // Act - Create logger with full type name (simulating ILogger) + var logger = loggerFactory.CreateLogger("MyApp.Services.OrderService"); + logger.LogInformation("Order service JSON log message"); + + // Assert - Should NOT create subfolder for type names + var logFile = Directory.GetFiles(_testFolderPath, "log_*.json").FirstOrDefault(); + Assert.NotNull(logFile); + var logContent = File.ReadAllText(logFile); + Assert.Contains("Order service JSON log message", logContent); + } } \ No newline at end of file diff --git a/src/MaksIT.Core.Tests/Logging/LoggerPrefixTests.cs b/src/MaksIT.Core.Tests/Logging/LoggerPrefixTests.cs new file mode 100644 index 0000000..a4207f9 --- /dev/null +++ b/src/MaksIT.Core.Tests/Logging/LoggerPrefixTests.cs @@ -0,0 +1,160 @@ +using MaksIT.Core.Logging; + +namespace MaksIT.Core.Tests.Logging; + +public class LoggerPrefixTests { + [Fact] + public void WithValue_ShouldCreateCorrectCategoryString() { + // Arrange & Act + var folderCategory = LoggerPrefix.Folder.WithValue("Audit"); + var categoryCategory = LoggerPrefix.Category.WithValue("Orders"); + var tagCategory = LoggerPrefix.Tag.WithValue("Critical"); + + // Assert + Assert.Equal("Folder:Audit", folderCategory); + Assert.Equal("Category:Orders", categoryCategory); + Assert.Equal("Tag:Critical", tagCategory); + } + + [Fact] 
+ public void WithValue_ShouldHandleSpacesInValue() { + // Arrange & Act + var result = LoggerPrefix.Folder.WithValue("My Custom Folder"); + + // Assert + Assert.Equal("Folder:My Custom Folder", result); + } + + [Fact] + public void WithValue_ShouldHandleEmptyValue() { + // Arrange & Act + var result = LoggerPrefix.Folder.WithValue(""); + + // Assert + Assert.Equal("Folder:", result); + } + + [Fact] + public void Parse_ShouldExtractFolderPrefix() { + // Arrange + var categoryName = "Folder:Audit"; + + // Act + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + // Assert + Assert.Equal(LoggerPrefix.Folder, prefix); + Assert.Equal("Audit", value); + } + + [Fact] + public void Parse_ShouldExtractCategoryPrefix() { + // Arrange + var categoryName = "Category:Orders"; + + // Act + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + // Assert + Assert.Equal(LoggerPrefix.Category, prefix); + Assert.Equal("Orders", value); + } + + [Fact] + public void Parse_ShouldExtractTagPrefix() { + // Arrange + var categoryName = "Tag:Critical"; + + // Act + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + // Assert + Assert.Equal(LoggerPrefix.Tag, prefix); + Assert.Equal("Critical", value); + } + + [Fact] + public void Parse_ShouldHandleValueWithSpaces() { + // Arrange + var categoryName = "Folder:My Custom Folder"; + + // Act + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + // Assert + Assert.Equal(LoggerPrefix.Folder, prefix); + Assert.Equal("My Custom Folder", value); + } + + [Fact] + public void Parse_ShouldReturnNullForUnrecognizedPrefix() { + // Arrange + var categoryName = "MyApp.Services.OrderService"; + + // Act + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + // Assert + Assert.Null(prefix); + Assert.Null(value); + } + + [Fact] + public void Parse_ShouldReturnNullForEmptyString() { + // Arrange + var categoryName = ""; + + // Act + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + // Assert + Assert.Null(prefix); + Assert.Null(value); + } + + [Fact] + public void Parse_ShouldHandleEmptyValueAfterPrefix() { + // Arrange + var categoryName = "Folder:"; + + // Act + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + // Assert + Assert.Equal(LoggerPrefix.Folder, prefix); + Assert.Equal("", value); + } + + [Fact] + public void Parse_ShouldBeCaseSensitive() { + // Arrange + var categoryName = "folder:Audit"; // lowercase 'f' + + // Act + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + // Assert + Assert.Null(prefix); + Assert.Null(value); + } + + [Fact] + public void GetAll_ShouldReturnAllPrefixes() { + // Arrange & Act + var allPrefixes = MaksIT.Core.Abstractions.Enumeration.GetAll().ToList(); + + // Assert + Assert.Equal(3, allPrefixes.Count); + Assert.Contains(LoggerPrefix.Folder, allPrefixes); + Assert.Contains(LoggerPrefix.Category, allPrefixes); + Assert.Contains(LoggerPrefix.Tag, allPrefixes); + } + + [Fact] + public void ToString_ShouldReturnPrefixName() { + // Arrange & Act & Assert + Assert.Equal("Folder:", LoggerPrefix.Folder.ToString()); + Assert.Equal("Category:", LoggerPrefix.Category.ToString()); + Assert.Equal("Tag:", LoggerPrefix.Tag.ToString()); + } +} diff --git a/src/MaksIT.Core.Tests/MaksIT.Core.Tests.csproj b/src/MaksIT.Core.Tests/MaksIT.Core.Tests.csproj index 24d3a2f..a537279 100644 --- a/src/MaksIT.Core.Tests/MaksIT.Core.Tests.csproj +++ b/src/MaksIT.Core.Tests/MaksIT.Core.Tests.csproj @@ -1,7 +1,7 @@  - net8.0 + net10.0 enable enable diff --git 
a/src/MaksIT.Core.Tests/Security/Base64UrlUtilityTests.cs b/src/MaksIT.Core.Tests/Security/Base64UrlUtilityTests.cs new file mode 100644 index 0000000..7de0df8 --- /dev/null +++ b/src/MaksIT.Core.Tests/Security/Base64UrlUtilityTests.cs @@ -0,0 +1,190 @@ +namespace MaksIT.Core.Tests.Security; + +using MaksIT.Core.Security; + +public class Base64UrlUtilityTests { + + #region Encode Tests + + [Fact] + public void Encode_String_ReturnsBase64UrlString() { + // Arrange + var input = "Hello, World!"; + + // Act + var result = Base64UrlUtility.Encode(input); + + // Assert + Assert.NotNull(result); + Assert.DoesNotContain("+", result); + Assert.DoesNotContain("/", result); + Assert.DoesNotContain("=", result); + } + + [Fact] + public void Encode_EmptyString_ReturnsEmptyString() { + // Arrange + var input = ""; + + // Act + var result = Base64UrlUtility.Encode(input); + + // Assert + Assert.Equal("", result); + } + + [Fact] + public void Encode_ByteArray_ReturnsBase64UrlString() { + // Arrange + var input = new byte[] { 0x00, 0x01, 0x02, 0x03, 0xFF, 0xFE }; + + // Act + var result = Base64UrlUtility.Encode(input); + + // Assert + Assert.NotNull(result); + Assert.DoesNotContain("+", result); + Assert.DoesNotContain("/", result); + Assert.DoesNotContain("=", result); + } + + [Fact] + public void Encode_NullByteArray_ThrowsArgumentNullException() { + // Arrange + byte[] input = null!; + + // Act & Assert + Assert.Throws(() => Base64UrlUtility.Encode(input)); + } + + [Theory] + [InlineData("f", "Zg")] + [InlineData("fo", "Zm8")] + [InlineData("foo", "Zm9v")] + [InlineData("foob", "Zm9vYg")] + [InlineData("fooba", "Zm9vYmE")] + [InlineData("foobar", "Zm9vYmFy")] + public void Encode_RFC4648TestVectors_ReturnsExpectedResult(string input, string expected) { + // Act + var result = Base64UrlUtility.Encode(input); + + // Assert + Assert.Equal(expected, result); + } + + [Fact] + public void Encode_StringWithSpecialChars_HandlesCorrectly() { + // Arrange - characters that would produce + and / in standard base64 + var input = "subjects?_d"; + + // Act + var result = Base64UrlUtility.Encode(input); + + // Assert + Assert.DoesNotContain("+", result); + Assert.DoesNotContain("/", result); + } + + #endregion + + #region Decode Tests + + [Fact] + public void Decode_ValidBase64Url_ReturnsOriginalBytes() { + // Arrange + var original = new byte[] { 0x00, 0x01, 0x02, 0x03, 0xFF, 0xFE }; + var encoded = Base64UrlUtility.Encode(original); + + // Act + var decoded = Base64UrlUtility.Decode(encoded); + + // Assert + Assert.Equal(original, decoded); + } + + [Fact] + public void Decode_NullInput_ThrowsArgumentNullException() { + // Arrange + string input = null!; + + // Act & Assert + Assert.Throws(() => Base64UrlUtility.Decode(input)); + } + + [Theory] + [InlineData("Zg", "f")] + [InlineData("Zm8", "fo")] + [InlineData("Zm9v", "foo")] + [InlineData("Zm9vYg", "foob")] + [InlineData("Zm9vYmE", "fooba")] + [InlineData("Zm9vYmFy", "foobar")] + public void DecodeToString_RFC4648TestVectors_ReturnsExpectedResult(string input, string expected) { + // Act + var result = Base64UrlUtility.DecodeToString(input); + + // Assert + Assert.Equal(expected, result); + } + + [Fact] + public void DecodeToString_ValidBase64Url_ReturnsOriginalString() { + // Arrange + var original = "Hello, World!"; + var encoded = Base64UrlUtility.Encode(original); + + // Act + var decoded = Base64UrlUtility.DecodeToString(encoded); + + // Assert + Assert.Equal(original, decoded); + } + + [Fact] + public void Decode_EmptyString_ReturnsEmptyArray() { + // 
Arrange + var input = ""; + + // Act + var result = Base64UrlUtility.Decode(input); + + // Assert + Assert.Empty(result); + } + + #endregion + + #region Round-trip Tests + + [Theory] + [InlineData("Simple text")] + [InlineData("Text with spaces and numbers 123")] + [InlineData("Special chars: !@#$%^&*()")] + [InlineData("Unicode: 日本語 中文 한국어")] + [InlineData("")] + public void RoundTrip_String_ReturnsOriginal(string original) { + // Act + var encoded = Base64UrlUtility.Encode(original); + var decoded = Base64UrlUtility.DecodeToString(encoded); + + // Assert + Assert.Equal(original, decoded); + } + + [Fact] + public void RoundTrip_BinaryData_ReturnsOriginal() { + // Arrange + var original = new byte[256]; + for (int i = 0; i < 256; i++) { + original[i] = (byte)i; + } + + // Act + var encoded = Base64UrlUtility.Encode(original); + var decoded = Base64UrlUtility.Decode(encoded); + + // Assert + Assert.Equal(original, decoded); + } + + #endregion +} diff --git a/src/MaksIT.Core.Tests/Webapi/Models/PatchOperationTests.cs b/src/MaksIT.Core.Tests/Webapi/Models/PatchOperationTests.cs new file mode 100644 index 0000000..7620813 --- /dev/null +++ b/src/MaksIT.Core.Tests/Webapi/Models/PatchOperationTests.cs @@ -0,0 +1,75 @@ +namespace MaksIT.Core.Tests.Webapi.Models; + +using MaksIT.Core.Webapi.Models; + +public class PatchOperationTests { + + [Fact] + public void PatchOperation_HasExpectedValues() { + // Assert - verify all enum values exist + Assert.Equal(0, (int)PatchOperation.SetField); + Assert.Equal(1, (int)PatchOperation.RemoveField); + Assert.Equal(2, (int)PatchOperation.AddToCollection); + Assert.Equal(3, (int)PatchOperation.RemoveFromCollection); + } + + [Fact] + public void PatchOperation_HasFourValues() { + // Arrange + var values = Enum.GetValues(); + + // Assert + Assert.Equal(4, values.Length); + } + + [Theory] + [InlineData(PatchOperation.SetField, "SetField")] + [InlineData(PatchOperation.RemoveField, "RemoveField")] + [InlineData(PatchOperation.AddToCollection, "AddToCollection")] + [InlineData(PatchOperation.RemoveFromCollection, "RemoveFromCollection")] + public void PatchOperation_ToString_ReturnsCorrectName(PatchOperation operation, string expectedName) { + // Act + var result = operation.ToString(); + + // Assert + Assert.Equal(expectedName, result); + } + + [Theory] + [InlineData("SetField", PatchOperation.SetField)] + [InlineData("RemoveField", PatchOperation.RemoveField)] + [InlineData("AddToCollection", PatchOperation.AddToCollection)] + [InlineData("RemoveFromCollection", PatchOperation.RemoveFromCollection)] + public void PatchOperation_Parse_ReturnsCorrectValue(string name, PatchOperation expected) { + // Act + var result = Enum.Parse(name); + + // Assert + Assert.Equal(expected, result); + } + + [Fact] + public void PatchOperation_TryParse_InvalidValue_ReturnsFalse() { + // Act + var result = Enum.TryParse("InvalidOperation", out var value); + + // Assert + Assert.False(result); + } + + [Fact] + public void PatchOperation_IsDefined_ValidValues_ReturnsTrue() { + // Assert + Assert.True(Enum.IsDefined(typeof(PatchOperation), 0)); + Assert.True(Enum.IsDefined(typeof(PatchOperation), 1)); + Assert.True(Enum.IsDefined(typeof(PatchOperation), 2)); + Assert.True(Enum.IsDefined(typeof(PatchOperation), 3)); + } + + [Fact] + public void PatchOperation_IsDefined_InvalidValue_ReturnsFalse() { + // Assert + Assert.False(Enum.IsDefined(typeof(PatchOperation), 99)); + Assert.False(Enum.IsDefined(typeof(PatchOperation), -1)); + } +} diff --git a/src/MaksIT.Core/Logging/BaseFileLogger.cs 
b/src/MaksIT.Core/Logging/BaseFileLogger.cs index 025132c..8178cd2 100644 --- a/src/MaksIT.Core/Logging/BaseFileLogger.cs +++ b/src/MaksIT.Core/Logging/BaseFileLogger.cs @@ -8,7 +8,17 @@ public abstract class BaseFileLogger : ILogger, IDisposable { private readonly LockManager _lockManager = new LockManager(); private readonly string _folderPath; private readonly TimeSpan _retentionPeriod; - private static readonly Mutex _fileMutex = new Mutex(false, "Global\\MaksITLoggerFileMutex"); // Named mutex for cross-process locking + private static readonly Mutex _fileMutex = CreateMutex(); + + private static Mutex CreateMutex() { + try { + // Try Global\ first for cross-session synchronization (services, multiple users) + return new Mutex(false, "Global\\MaksITLoggerFileMutex"); + } catch (UnauthorizedAccessException) { + // Fall back to Local\ if Global\ is not allowed (sandboxed/restricted environment) + return new Mutex(false, "Local\\MaksITLoggerFileMutex"); + } + } protected BaseFileLogger(string folderPath, TimeSpan retentionPeriod) { _folderPath = folderPath; diff --git a/src/MaksIT.Core/Logging/FileLoggerProvider.cs b/src/MaksIT.Core/Logging/FileLoggerProvider.cs index 600a5ec..f10b7da 100644 --- a/src/MaksIT.Core/Logging/FileLoggerProvider.cs +++ b/src/MaksIT.Core/Logging/FileLoggerProvider.cs @@ -1,9 +1,4 @@ -using Microsoft.Extensions.Logging; -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; +using Microsoft.Extensions.Logging; namespace MaksIT.Core.Logging; @@ -18,7 +13,23 @@ public class FileLoggerProvider : ILoggerProvider { } public ILogger CreateLogger(string categoryName) { - return new FileLogger(_folderPath, _retentionPeriod); + var folderPath = ResolveFolderPath(categoryName); + return new FileLogger(folderPath, _retentionPeriod); + } + + private string ResolveFolderPath(string categoryName) { + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + if (prefix == LoggerPrefix.Folder && !string.IsNullOrWhiteSpace(value)) { + return Path.Combine(_folderPath, SanitizeForPath(value)); + } + + return _folderPath; + } + + private static string SanitizeForPath(string input) { + var invalid = Path.GetInvalidPathChars(); + return string.Concat(input.Where(c => !invalid.Contains(c))); } public void Dispose() { } diff --git a/src/MaksIT.Core/Logging/JsonFileLoggerProvider.cs b/src/MaksIT.Core/Logging/JsonFileLoggerProvider.cs index 5d49192..5c09ce5 100644 --- a/src/MaksIT.Core/Logging/JsonFileLoggerProvider.cs +++ b/src/MaksIT.Core/Logging/JsonFileLoggerProvider.cs @@ -13,7 +13,23 @@ public class JsonFileLoggerProvider : ILoggerProvider { } public ILogger CreateLogger(string categoryName) { - return new JsonFileLogger(_folderPath, _retentionPeriod); + var folderPath = ResolveFolderPath(categoryName); + return new JsonFileLogger(folderPath, _retentionPeriod); + } + + private string ResolveFolderPath(string categoryName) { + var (prefix, value) = LoggerPrefix.Parse(categoryName); + + if (prefix == LoggerPrefix.Folder && !string.IsNullOrWhiteSpace(value)) { + return Path.Combine(_folderPath, SanitizeForPath(value)); + } + + return _folderPath; + } + + private static string SanitizeForPath(string input) { + var invalid = Path.GetInvalidPathChars(); + return string.Concat(input.Where(c => !invalid.Contains(c))); } public void Dispose() { } diff --git a/src/MaksIT.Core/Logging/LoggerPrefix.cs b/src/MaksIT.Core/Logging/LoggerPrefix.cs new file mode 100644 index 0000000..12673ff --- /dev/null +++ 
b/src/MaksIT.Core/Logging/LoggerPrefix.cs @@ -0,0 +1,29 @@ +using MaksIT.Core.Abstractions; + +namespace MaksIT.Core.Logging; + +public class LoggerPrefix : Enumeration { + public static readonly LoggerPrefix Folder = new(1, "Folder:"); + public static readonly LoggerPrefix Category = new(2, "Category:"); + public static readonly LoggerPrefix Tag = new(3, "Tag:"); + + private LoggerPrefix(int id, string name) : base(id, name) { } + + /// + /// Creates a category string with this prefix and the given value. + /// + public string WithValue(string value) => $"{Name}{value}"; + + /// + /// Tries to extract the prefix and value from a category name. + /// + public static (LoggerPrefix? prefix, string? value) Parse(string categoryName) { + foreach (var prefix in GetAll()) { + if (categoryName.StartsWith(prefix.Name, StringComparison.Ordinal)) { + var value = categoryName.Substring(prefix.Name.Length); + return (prefix, value); + } + } + return (null, null); + } +} diff --git a/src/MaksIT.Core/MaksIT.Core.csproj b/src/MaksIT.Core/MaksIT.Core.csproj index dc355fd..cb7245d 100644 --- a/src/MaksIT.Core/MaksIT.Core.csproj +++ b/src/MaksIT.Core/MaksIT.Core.csproj @@ -1,23 +1,44 @@ - + - net8.0 + net10.0 enable enable $(MSBuildProjectName.Replace(" ", "_")) + + true + $(NoWarn);CS1591 + MaksIT.Core - 1.6.0 + 1.6.1 Maksym Sadovnychyy MAKS-IT MaksIT.Core - MaksIT.Core is a collection of helper methods and extensions for .NET projects, designed to simplify common tasks and improve code readability. The library includes extensions for `Guid`, `string`, `Object`, and a base class for creating enumeration types. - dotnet;enumeration;string;guid;object;parsers;extensions;jwt;aes;crc32; + Copyright © Maksym Sadovnychyy (MAKS-IT) + A comprehensive .NET library providing utilities for logging (file/JSON with folder organization), security (JWT, JWK, JWS, TOTP, AES-GCM, password hashing), extensions (string, object, LINQ expressions, DateTime), saga orchestration, COMB GUIDs, Web API pagination, and more. + dotnet;extensions;logging;file-logger;jwt;jwk;jws;totp;2fa;aes-gcm;password-hasher;saga;comb-guid;pagination;crc32;base32;enumeration + + https://github.com/MAKS-IT-COM/maksit-core https://github.com/MAKS-IT-COM/maksit-core - false + git + README.md LICENSE.md + See https://github.com/MAKS-IT-COM/maksit-core/releases + + false + + + true + true + true + snupkg + + + true + true @@ -25,6 +46,11 @@ + + + + + @@ -37,4 +63,5 @@ + diff --git a/src/OllamaClient.psm1 b/src/OllamaClient.psm1 new file mode 100644 index 0000000..859a5fd --- /dev/null +++ b/src/OllamaClient.psm1 @@ -0,0 +1,572 @@ +<# +.SYNOPSIS + Generic Ollama API client module for PowerShell. + +.DESCRIPTION + Provides a simple interface to interact with Ollama's local LLM API: + - Text generation (chat/completion) + - Embeddings generation + - Model management + - RAG utilities (cosine similarity, clustering) + +.REQUIREMENTS + - Ollama running locally (default: http://localhost:11434) + +.USAGE + Import-Module .\OllamaClient.psm1 + + # Configure + Set-OllamaConfig -ApiUrl "http://localhost:11434" + + # Check availability + if (Test-OllamaAvailable) { + # Generate text + $response = Invoke-OllamaPrompt -Model "llama3.1:8b" -Prompt "Hello!" 
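+
+    # Chat-style conversation (illustrative)
+    $reply = Invoke-OllamaChat -Model "llama3.1:8b" -Messages @(
+      @{ role = "user"; content = "Hello!" }
+    )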
+ + # Get embeddings + $embedding = Get-OllamaEmbedding -Model "nomic-embed-text" -Text "Sample text" + } +#> + +# ============================================================================== +# MODULE CONFIGURATION +# ============================================================================== + +$script:OllamaConfig = @{ + ApiUrl = "http://localhost:11434" + DefaultTimeout = 180 + DefaultTemperature = 0.2 + DefaultMaxTokens = 0 + DefaultContextWindow = 0 +} + +# ============================================================================== +# CONFIGURATION FUNCTIONS +# ============================================================================== + +function Set-OllamaConfig { + <# + .SYNOPSIS + Configure Ollama client settings. + .PARAMETER ApiUrl + Ollama API endpoint URL (default: http://localhost:11434). + .PARAMETER DefaultTimeout + Default timeout in seconds for API calls. + .PARAMETER DefaultTemperature + Default temperature for text generation (0.0-1.0). + .PARAMETER DefaultMaxTokens + Default maximum tokens to generate. + .PARAMETER DefaultContextWindow + Default context window size (num_ctx). + #> + param( + [string]$ApiUrl, + [int]$DefaultTimeout, + [double]$DefaultTemperature, + [int]$DefaultMaxTokens, + [int]$DefaultContextWindow + ) + + if ($ApiUrl) { + $script:OllamaConfig.ApiUrl = $ApiUrl + } + + if ($PSBoundParameters.ContainsKey('DefaultTimeout')) { + $script:OllamaConfig.DefaultTimeout = $DefaultTimeout + } + + if ($PSBoundParameters.ContainsKey('DefaultTemperature')) { + $script:OllamaConfig.DefaultTemperature = $DefaultTemperature + } + + if ($PSBoundParameters.ContainsKey('DefaultMaxTokens')) { + $script:OllamaConfig.DefaultMaxTokens = $DefaultMaxTokens + } + + if ($PSBoundParameters.ContainsKey('DefaultContextWindow')) { + $script:OllamaConfig.DefaultContextWindow = $DefaultContextWindow + } +} + +function Get-OllamaConfig { + <# + .SYNOPSIS + Get current Ollama client configuration. + #> + return $script:OllamaConfig.Clone() +} + +# ============================================================================== +# CONNECTION & STATUS +# ============================================================================== + +function Test-OllamaAvailable { + <# + .SYNOPSIS + Check if Ollama API is available and responding. + .OUTPUTS + Boolean indicating if Ollama is available. + #> + try { + $null = Invoke-RestMethod -Uri "$($script:OllamaConfig.ApiUrl)/api/tags" -TimeoutSec 5 -ErrorAction Stop + return $true + } + catch { + return $false + } +} + +function Get-OllamaModels { + <# + .SYNOPSIS + Get list of available models from Ollama. + .OUTPUTS + Array of model objects with name, size, and other properties. + #> + try { + $response = Invoke-RestMethod -Uri "$($script:OllamaConfig.ApiUrl)/api/tags" -TimeoutSec 10 -ErrorAction Stop + return $response.models + } + catch { + Write-Warning "Failed to get Ollama models: $_" + return @() + } +} + +function Test-OllamaModel { + <# + .SYNOPSIS + Check if a specific model is available in Ollama. + .PARAMETER Model + Model name to check. + #> + param([Parameter(Mandatory)][string]$Model) + + $models = Get-OllamaModels + return ($models | Where-Object { $_.name -eq $Model -or $_.name -like "${Model}:*" }) -ne $null +} + +# ============================================================================== +# TEXT GENERATION +# ============================================================================== + +function Invoke-OllamaPrompt { + <# + .SYNOPSIS + Send a prompt to an Ollama model and get a response. 
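+  .DESCRIPTION
+    Sends a single non-streaming request to the /api/generate endpoint and returns
+    the trimmed response text, or $null (with a warning) if the call fails.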
+ .PARAMETER Model + Model name (e.g., "llama3.1:8b", "qwen2.5-coder:7b"). + .PARAMETER Prompt + The prompt text to send. + .PARAMETER ContextWindow + Context window size (num_ctx). Uses default if not specified. + .PARAMETER MaxTokens + Maximum tokens to generate (num_predict). Uses default if not specified. + .PARAMETER Temperature + Temperature for generation (0.0-1.0). Uses default if not specified. + .PARAMETER Timeout + Timeout in seconds. Uses default if not specified. + .PARAMETER System + Optional system prompt. + .OUTPUTS + Generated text response or $null if failed. + #> + param( + [Parameter(Mandatory)][string]$Model, + [Parameter(Mandatory)][string]$Prompt, + [int]$ContextWindow, + [int]$MaxTokens, + [double]$Temperature, + [int]$Timeout, + [string]$System + ) + + $config = $script:OllamaConfig + + # Use defaults if not specified + if (-not $PSBoundParameters.ContainsKey('MaxTokens')) { $MaxTokens = $config.DefaultMaxTokens } + if (-not $PSBoundParameters.ContainsKey('Temperature')) { $Temperature = $config.DefaultTemperature } + if (-not $PSBoundParameters.ContainsKey('Timeout')) { $Timeout = $config.DefaultTimeout } + + $options = @{ + temperature = $Temperature + } + + # Only set num_predict if MaxTokens > 0 (0 = unlimited/model default) + if ($MaxTokens -and $MaxTokens -gt 0) { + $options.num_predict = $MaxTokens + } + + # Only set context window if explicitly provided (let model use its default otherwise) + if ($ContextWindow -and $ContextWindow -gt 0) { + $options.num_ctx = $ContextWindow + } + + $body = @{ + model = $Model + prompt = $Prompt + stream = $false + options = $options + } + + if ($System) { + $body.system = $System + } + + $jsonBody = $body | ConvertTo-Json -Depth 3 + + # TimeoutSec 0 = infinite wait + $restParams = @{ + Uri = "$($config.ApiUrl)/api/generate" + Method = "Post" + Body = $jsonBody + ContentType = "application/json" + } + if ($Timeout -gt 0) { $restParams.TimeoutSec = $Timeout } + + try { + $response = Invoke-RestMethod @restParams + return $response.response.Trim() + } + catch { + Write-Warning "Ollama prompt failed: $_" + return $null + } +} + +function Invoke-OllamaChat { + <# + .SYNOPSIS + Send a chat conversation to an Ollama model. + .PARAMETER Model + Model name. + .PARAMETER Messages + Array of message objects with 'role' and 'content' properties. + Roles: "system", "user", "assistant" + .PARAMETER ContextWindow + Context window size. + .PARAMETER MaxTokens + Maximum tokens to generate. + .PARAMETER Temperature + Temperature for generation. + .OUTPUTS + Generated response text or $null if failed. 
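+  .EXAMPLE
+    # Illustrative two-message conversation
+    $messages = @(
+      @{ role = "system"; content = "You are a concise assistant." },
+      @{ role = "user"; content = "Hello!" }
+    )
+    Invoke-OllamaChat -Model "llama3.1:8b" -Messages $messages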
+ #> + param( + [Parameter(Mandatory)][string]$Model, + [Parameter(Mandatory)][array]$Messages, + [int]$ContextWindow, + [int]$MaxTokens, + [double]$Temperature, + [int]$Timeout + ) + + $config = $script:OllamaConfig + + if (-not $PSBoundParameters.ContainsKey('MaxTokens')) { $MaxTokens = $config.DefaultMaxTokens } + if (-not $PSBoundParameters.ContainsKey('Temperature')) { $Temperature = $config.DefaultTemperature } + if (-not $PSBoundParameters.ContainsKey('Timeout')) { $Timeout = $config.DefaultTimeout } + + $options = @{ + temperature = $Temperature + } + + # Only set num_predict if MaxTokens > 0 (0 = unlimited/model default) + if ($MaxTokens -and $MaxTokens -gt 0) { + $options.num_predict = $MaxTokens + } + + # Only set context window if explicitly provided + if ($ContextWindow -and $ContextWindow -gt 0) { + $options.num_ctx = $ContextWindow + } + + $body = @{ + model = $Model + messages = $Messages + stream = $false + options = $options + } + + $jsonBody = $body | ConvertTo-Json -Depth 4 + + # TimeoutSec 0 = infinite wait + $restParams = @{ + Uri = "$($config.ApiUrl)/api/chat" + Method = "Post" + Body = $jsonBody + ContentType = "application/json" + } + if ($Timeout -gt 0) { $restParams.TimeoutSec = $Timeout } + + try { + $response = Invoke-RestMethod @restParams + return $response.message.content.Trim() + } + catch { + Write-Warning "Ollama chat failed: $_" + return $null + } +} + +# ============================================================================== +# EMBEDDINGS +# ============================================================================== + +function Get-OllamaEmbedding { + <# + .SYNOPSIS + Get embedding vector for text using an Ollama embedding model. + .PARAMETER Model + Embedding model name (e.g., "nomic-embed-text", "mxbai-embed-large"). + .PARAMETER Text + Text to embed. + .PARAMETER Timeout + Timeout in seconds. + .OUTPUTS + Array of doubles representing the embedding vector, or $null if failed. + #> + param( + [Parameter(Mandatory)][string]$Model, + [Parameter(Mandatory)][string]$Text, + [int]$Timeout = 30 + ) + + $body = @{ + model = $Model + prompt = $Text + } | ConvertTo-Json + + try { + $response = Invoke-RestMethod -Uri "$($script:OllamaConfig.ApiUrl)/api/embeddings" -Method Post -Body $body -ContentType "application/json" -TimeoutSec $Timeout + return $response.embedding + } + catch { + Write-Warning "Ollama embedding failed: $_" + return $null + } +} + +function Get-OllamaEmbeddings { + <# + .SYNOPSIS + Get embeddings for multiple texts (batch). + .PARAMETER Model + Embedding model name. + .PARAMETER Texts + Array of texts to embed. + .PARAMETER ShowProgress + Show progress indicator. + .OUTPUTS + Array of objects with Text and Embedding properties. 
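+  .EXAMPLE
+    # Illustrative sketch only - "nomic-embed-text" is one of the embedding models mentioned
+    # above; any pulled embedding model works.
+    $vectors = Get-OllamaEmbeddings -Model "nomic-embed-text" -Texts @("first commit", "second commit") -ShowProgress
+    $vectors[0].Embedding.Count   # dimensionality of the first vector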
+ #> + param( + [Parameter(Mandatory)][string]$Model, + [Parameter(Mandatory)][string[]]$Texts, + [switch]$ShowProgress + ) + + $results = @() + $total = $Texts.Count + $current = 0 + + foreach ($text in $Texts) { + $current++ + if ($ShowProgress) { + Write-Progress -Activity "Getting embeddings" -Status "$current of $total" -PercentComplete (($current / $total) * 100) + } + + $embedding = Get-OllamaEmbedding -Model $Model -Text $text + if ($embedding) { + $results += @{ + Text = $text + Embedding = $embedding + } + } + } + + if ($ShowProgress) { + Write-Progress -Activity "Getting embeddings" -Completed + } + + return $results +} + +# ============================================================================== +# RAG UTILITIES +# ============================================================================== + +function Get-CosineSimilarity { + <# + .SYNOPSIS + Calculate cosine similarity between two embedding vectors. + .PARAMETER Vector1 + First embedding vector. + .PARAMETER Vector2 + Second embedding vector. + .OUTPUTS + Cosine similarity value between -1 and 1. + #> + param( + [Parameter(Mandatory)][double[]]$Vector1, + [Parameter(Mandatory)][double[]]$Vector2 + ) + + if ($Vector1.Length -ne $Vector2.Length) { + Write-Warning "Vector lengths don't match: $($Vector1.Length) vs $($Vector2.Length)" + return 0 + } + + $dotProduct = 0.0 + $norm1 = 0.0 + $norm2 = 0.0 + + for ($i = 0; $i -lt $Vector1.Length; $i++) { + $dotProduct += $Vector1[$i] * $Vector2[$i] + $norm1 += $Vector1[$i] * $Vector1[$i] + $norm2 += $Vector2[$i] * $Vector2[$i] + } + + $norm1 = [Math]::Sqrt($norm1) + $norm2 = [Math]::Sqrt($norm2) + + if ($norm1 -eq 0 -or $norm2 -eq 0) { return 0 } + return $dotProduct / ($norm1 * $norm2) +} + +function Group-TextsByEmbedding { + <# + .SYNOPSIS + Cluster texts by embedding similarity. + .PARAMETER Model + Embedding model name. + .PARAMETER Texts + Array of texts to cluster. + .PARAMETER SimilarityThreshold + Minimum cosine similarity to group texts together (0.0-1.0). + .PARAMETER ShowProgress + Show progress during embedding. + .OUTPUTS + Array of clusters (each cluster is an array of texts). + #> + param( + [Parameter(Mandatory)][string]$Model, + [Parameter(Mandatory)][string[]]$Texts, + [double]$SimilarityThreshold = 0.65, + [switch]$ShowProgress + ) + + if ($Texts.Length -eq 0) { return @() } + if ($Texts.Length -eq 1) { return @(,@($Texts[0])) } + + # Get embeddings + $embeddings = Get-OllamaEmbeddings -Model $Model -Texts $Texts -ShowProgress:$ShowProgress + + if ($embeddings.Length -eq 0) { + return @($Texts | ForEach-Object { ,@($_) }) + } + + # Mark all as unclustered + $embeddings | ForEach-Object { $_.Clustered = $false } + + # Cluster similar texts + $clusters = @() + + for ($i = 0; $i -lt $embeddings.Length; $i++) { + if ($embeddings[$i].Clustered) { continue } + + $cluster = @($embeddings[$i].Text) + $embeddings[$i].Clustered = $true + + for ($j = $i + 1; $j -lt $embeddings.Length; $j++) { + if ($embeddings[$j].Clustered) { continue } + + $similarity = Get-CosineSimilarity -Vector1 $embeddings[$i].Embedding -Vector2 $embeddings[$j].Embedding + + if ($similarity -ge $SimilarityThreshold) { + $cluster += $embeddings[$j].Text + $embeddings[$j].Clustered = $true + } + } + + $clusters += ,@($cluster) + } + + return $clusters +} + +function Find-SimilarTexts { + <# + .SYNOPSIS + Find texts most similar to a query using embeddings. + .PARAMETER Model + Embedding model name. + .PARAMETER Query + Query text to find similar texts for. 
+ .PARAMETER Texts + Array of texts to search through. + .PARAMETER TopK + Number of most similar texts to return. + .PARAMETER MinSimilarity + Minimum similarity threshold. + .OUTPUTS + Array of objects with Text and Similarity properties, sorted by similarity. + #> + param( + [Parameter(Mandatory)][string]$Model, + [Parameter(Mandatory)][string]$Query, + [Parameter(Mandatory)][string[]]$Texts, + [int]$TopK = 5, + [double]$MinSimilarity = 0.0 + ) + + # Get query embedding + $queryEmbedding = Get-OllamaEmbedding -Model $Model -Text $Query + if (-not $queryEmbedding) { return @() } + + # Get text embeddings and calculate similarities + $results = @() + foreach ($text in $Texts) { + $textEmbedding = Get-OllamaEmbedding -Model $Model -Text $text + if ($textEmbedding) { + $similarity = Get-CosineSimilarity -Vector1 $queryEmbedding -Vector2 $textEmbedding + if ($similarity -ge $MinSimilarity) { + $results += @{ + Text = $text + Similarity = $similarity + } + } + } + } + + # Sort by similarity and return top K + return $results | Sort-Object -Property Similarity -Descending | Select-Object -First $TopK +} + +# ============================================================================== +# MODULE EXPORTS +# ============================================================================== + +Export-ModuleMember -Function @( + # Configuration + 'Set-OllamaConfig' + 'Get-OllamaConfig' + + # Connection & Status + 'Test-OllamaAvailable' + 'Get-OllamaModels' + 'Test-OllamaModel' + + # Text Generation + 'Invoke-OllamaPrompt' + 'Invoke-OllamaChat' + + # Embeddings + 'Get-OllamaEmbedding' + 'Get-OllamaEmbeddings' + + # RAG Utilities + 'Get-CosineSimilarity' + 'Group-TextsByEmbedding' + 'Find-SimilarTexts' +) diff --git a/src/Release-NuGetPackage.ps1 b/src/Release-NuGetPackage.ps1 index 631dea8..b3ff87e 100644 --- a/src/Release-NuGetPackage.ps1 +++ b/src/Release-NuGetPackage.ps1 @@ -1,60 +1,983 @@ -# Retrieve the API key from the environment variable -$apiKey = $env:NUGET_MAKS_IT -if (-not $apiKey) { - Write-Host "Error: API key not found in environment variable NUGET_MAKS_IT." +<# +.SYNOPSIS + Release script for MaksIT.Core NuGet package and GitHub release. + +.DESCRIPTION + This script automates the release process for MaksIT.Core library: + - Validates environment and prerequisites + - Checks if version already exists on NuGet.org + - Scans for vulnerable packages (security check) + - Builds and tests the project (Windows + Linux via Docker) + - Collects code coverage with Coverlet (threshold enforcement optional) + - Generates test result artifacts (TRX format) and coverage reports + - Displays test results with pass/fail counts and coverage percentage + - Publishes to NuGet.org + - Creates a GitHub release with changelog and package assets + - Shows timing summary for all steps + +.REQUIREMENTS + Environment Variables: + - NUGET_MAKS_IT : NuGet.org API key for publishing packages + - GITHUB_MAKS_IT_COM : GitHub Personal Access Token (needs 'repo' scope) + + Tools (Required): + - dotnet CLI : For building, testing, and packing + - git : For version control operations + - gh (GitHub CLI) : For creating GitHub releases + - docker : For cross-platform Linux testing + +.WORKFLOW + 1. VALIDATION PHASE + - Check required environment variables (NuGet key, GitHub token) + - Check required tools are installed (dotnet, git, gh, docker) + - Verify no uncommitted changes in working directory + - Authenticate GitHub CLI + + 2. 
VERSION & RELEASE CHECK PHASE + - Read latest version from CHANGELOG.md + - Find commit with matching version tag + - Validate tag is on configured release branch (from scriptsettings.json) + - Check if already released on NuGet.org (skip if yes) + - Read target framework from MaksIT.Core.csproj + - Extract release notes from CHANGELOG.md for current version + + 3. SECURITY SCAN + - Check for vulnerable packages (dotnet list package --vulnerable) + - Fail or warn based on $failOnVulnerabilities setting + + 4. BUILD & TEST PHASE + - Clean previous builds (delete bin/obj folders) + - Restore NuGet packages + - Windows: Build main project -> Build test project -> Run tests with coverage + - Analyze code coverage (fail if below threshold when configured) + - Linux (Docker): Build main project -> Build test project -> Run tests (TRX report) + - Rebuild for Windows (Docker may overwrite bin/obj) + - Create NuGet package (.nupkg) and symbols (.snupkg) + - All steps are timed for performance tracking + + 5. CONFIRMATION PHASE + - Display release summary + - If -DryRun: Show summary and exit (no changes made) + - Prompt user for confirmation before proceeding + + 6. NUGET RELEASE PHASE + - Push package to NuGet.org + - Skip if version already exists (--skip-duplicate) + + 7. GITHUB RELEASE PHASE + - Delete existing GitHub release if present (re-release scenario) + - Push tag to remote if not already there + - Create GitHub release with: + * Release notes from CHANGELOG.md + * .nupkg and .snupkg as downloadable assets + + 8. COMPLETION PHASE + - Display timing summary for all steps + - Display test results summary + - Display success summary with links + - Open NuGet and GitHub release pages in browser + - TODO: Email notification (template provided) + - TODO: Package signing (template provided) + +.PARAMETER DryRun + If specified, runs build and tests without publishing. + - Bypasses branch check (warns instead) + - No changes are made to NuGet, GitHub, or git tags + +.USAGE + Before running: + 1. Ensure Docker Desktop is running (for Linux tests) + 2. Update version in MaksIT.Core.csproj + 3. Run .\Generate-Changelog.ps1 to update CHANGELOG.md and LICENSE.md + 4. Review and commit all changes + 5. Create version tag: git tag v1.x.x + 6. Run: .\Release-NuGetPackage.ps1 + + Note: The script finds the commit with the tag matching CHANGELOG.md version. + You can run it from any branch/commit - it releases the tagged commit. 
+ + Dry run (test without publishing): + .\Release-NuGetPackage.ps1 -DryRun + + Generate changelog and update LICENSE year: + .\Generate-Changelog.ps1 + .\Generate-Changelog.ps1 -DryRun + +.CONFIGURATION + All settings are stored in scriptsettings.json: + - qualityGates: Coverage threshold, vulnerability checks + - packageSigning: Code signing certificate configuration + - emailNotification: SMTP settings for release notifications + +.NOTES + Author: Maksym Sadovnychyy (MAKS-IT) + Repository: https://github.com/MAKS-IT-COM/maksit-core +#> + +param( + [switch]$DryRun +) + +# ============================================================================== +# PATH CONFIGURATION +# ============================================================================== + +$solutionDir = Split-Path -Parent $MyInvocation.MyCommand.Path +$repoRoot = Split-Path -Parent $solutionDir +$projectDir = "$solutionDir\MaksIT.Core" +$outputDir = "$projectDir\bin\Release" +$testProjectDir = "$solutionDir\MaksIT.Core.Tests" +$csprojPath = "$projectDir\MaksIT.Core.csproj" +$testResultsDir = "$repoRoot\TestResults" + +# ============================================================================== +# IMPORT MODULES +# ============================================================================== + +# Import build utilities module +$buildUtilsPath = Join-Path $solutionDir "BuildUtils.psm1" +if (Test-Path $buildUtilsPath) { + Import-Module $buildUtilsPath -Force +} +else { + Write-Error "BuildUtils.psm1 not found at $buildUtilsPath" exit 1 } +# Initialize step timer +Initialize-StepTimer + +# ============================================================================== +# CONFIGURATION +# ============================================================================== + +if ($TestChangelog) { + Write-Banner "TEST CHANGELOG MODE - AI generation only" +} +elseif ($DryRun) { + Write-Banner "DRY RUN MODE - No changes will be made" +} + # NuGet source $nugetSource = "https://api.nuget.org/v3/index.json" -# Define paths -$solutionDir = Split-Path -Parent $MyInvocation.MyCommand.Path -$projectDir = "$solutionDir\MaksIT.Core" -$outputDir = "$projectDir\bin\Release" -$testProjectDir = "$solutionDir\MaksIT.Core.Tests" +# ============================================================================== +# LOAD SETTINGS FROM JSON +# ============================================================================== -# Clean previous builds -Write-Host "Cleaning previous builds..." -dotnet clean $projectDir -c Release -dotnet clean $testProjectDir -c Release - -# Build the test project -Write-Host "Building the test project..." -dotnet build $testProjectDir -c Release - -# Run tests -Write-Host "Running tests..." -dotnet test $testProjectDir -c Release -if ($LASTEXITCODE -ne 0) { - Write-Host "Tests failed. Aborting release process." +$settingsPath = Join-Path $solutionDir "scriptsettings.json" +if (Test-Path $settingsPath) { + $settings = Get-Content $settingsPath -Raw | ConvertFrom-Json + Write-Host "Loaded settings from scriptsettings.json" +} +else { + Write-Error "Settings file not found: $settingsPath" exit 1 } -# Build the main project -Write-Host "Building the project..." -dotnet build $projectDir -c Release +# Resolve paths from settings (relative to script location) +$changelogPath = if ($settings.paths.changelogPath) { + [System.IO.Path]::GetFullPath((Join-Path $solutionDir $settings.paths.changelogPath)) +} +else { + "$repoRoot\CHANGELOG.md" +} -# Pack the NuGet package -Write-Host "Packing the project..." 
-dotnet pack $projectDir -c Release --no-build +# Release branch setting +$releaseBranch = if ($settings.release.branch) { $settings.release.branch } else { "main" } -# Look for the .nupkg file -$packageFile = Get-ChildItem -Path $outputDir -Filter "*.nupkg" -Recurse | Sort-Object LastWriteTime -Descending | Select-Object -First 1 +# ============================================================================== +# SECRETS FROM ENVIRONMENT VARIABLES +# ============================================================================== -if ($packageFile) { - Write-Host "Package created successfully: $($packageFile.FullName)" - - # Push the package to NuGet - Write-Host "Pushing the package to NuGet..." - dotnet nuget push $packageFile.FullName -k $apiKey -s $nugetSource --skip-duplicate - - if ($LASTEXITCODE -eq 0) { - Write-Host "Package pushed successfully." - } else { - Write-Host "Failed to push the package." +# Get env var names from settings (allows customization) +$envVars = $settings.environmentVariables + +# NuGet API key +$nugetApiKey = [Environment]::GetEnvironmentVariable($envVars.nugetApiKey) +if (-not $nugetApiKey) { + Write-Error "Error: API key not found in environment variable $($envVars.nugetApiKey)." + exit 1 +} + +# GitHub token (set for gh CLI) +$env:GH_TOKEN = [Environment]::GetEnvironmentVariable($envVars.githubToken) + +# Package signing password (optional) +$packageSigningCertPassword = [Environment]::GetEnvironmentVariable($envVars.signingCertPassword) + +# SMTP password (optional) +$smtpPassword = [Environment]::GetEnvironmentVariable($envVars.smtpPassword) + +# ============================================================================== +# NON-SECRET SETTINGS +# ============================================================================== + +# Quality gates +$coverageThreshold = $settings.qualityGates.coverageThreshold +$failOnVulnerabilities = $settings.qualityGates.failOnVulnerabilities + +# Package signing (non-secret parts) +$packageSigningEnabled = $settings.packageSigning.enabled +$packageSigningCertPath = $settings.packageSigning.certificatePath +$packageSigningTimestamper = $settings.packageSigning.timestampServer + +# Email notification (non-secret parts) +$emailEnabled = $settings.emailNotification.enabled +$emailSmtpServer = $settings.emailNotification.smtpServer +$emailSmtpPort = $settings.emailNotification.smtpPort +$emailUseSsl = $settings.emailNotification.useSsl +$emailFrom = $settings.emailNotification.from +$emailTo = $settings.emailNotification.to + +# ============================================================================== +# PREREQUISITE CHECKS +# ============================================================================== + +Assert-Commands @("dotnet", "git", "gh", "docker") + +# ============================================================================== +# GIT STATUS VALIDATION +# ============================================================================== + +# Check for uncommitted changes (always block) +Write-Host "Checking for uncommitted changes..." +$gitStatus = Get-GitStatus + +if (-not $gitStatus.IsClean) { + $fileCount = $gitStatus.Staged.Count + $gitStatus.Modified.Count + $gitStatus.Untracked.Count + $gitStatus.Deleted.Count + Write-Host "ERROR: You have $fileCount uncommitted file(s). Commit or stash them first." -ForegroundColor Red + Show-GitStatus $gitStatus + exit 1 +} + +Write-Host "Working directory is clean." 
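+
+<#
+  Illustration only (not executed): the discovery step below expects the newest CHANGELOG.md
+  entry and the git tag to agree on the version, e.g. for version 1.2.3:
+
+    ## v1.2.3              first "## vX.Y.Z" heading found becomes the release version
+    git tag v1.2.3         matching tag; it must sit on the configured release branch
+    git push origin v1.2.3
+#>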
+ +# ============================================================================== +# VERSION & TAG DISCOVERY +# ============================================================================== + +# Read latest version from CHANGELOG.md +Write-Host "Reading version from CHANGELOG.md..." +if (-not (Test-Path $changelogPath)) { + Write-Error "CHANGELOG.md not found at $changelogPath" + exit 1 +} + +$changelogContent = Get-Content $changelogPath -Raw +$versionMatch = [regex]::Match($changelogContent, '##\s+v(\d+\.\d+\.\d+)') + +if (-not $versionMatch.Success) { + Write-Error "No version found in CHANGELOG.md (expected format: ## v1.2.3)" + exit 1 +} + +$version = $versionMatch.Groups[1].Value +$tag = "v$version" +Write-Host "Latest changelog version: $version" + +# Find commit with this tag +$tagCommit = git rev-parse "$tag^{commit}" 2>$null +if ($LASTEXITCODE -ne 0 -or -not $tagCommit) { + Write-Host "" + Write-Host "ERROR: Tag $tag not found." -ForegroundColor Red + Write-Host "The release process requires a tag matching the changelog version." -ForegroundColor Yellow + Write-Host "" + Write-Host "To fix, run:" -ForegroundColor Cyan + Write-Host " git tag $tag " -ForegroundColor Cyan + Write-Host " git push origin $tag" -ForegroundColor Cyan + exit 1 +} + +$shortCommit = $tagCommit.Substring(0, 7) +Write-Host "Found tag $tag -> commit $shortCommit" + +# Validate tag commit is on release branch +if ($releaseBranch) { + $branchContains = git branch --contains $tagCommit --list $releaseBranch 2>$null + if (-not $branchContains) { + Write-Host "" + Write-Host "ERROR: Tag $tag (commit $shortCommit) is not on branch '$releaseBranch'." -ForegroundColor Red + Write-Host "Release is only allowed from the configured branch." -ForegroundColor Yellow + Write-Host "" + Write-Host "Either:" -ForegroundColor Cyan + Write-Host " 1. Merge the tagged commit to '$releaseBranch'" -ForegroundColor Cyan + Write-Host " 2. Change release.branch in scriptsettings.json" -ForegroundColor Cyan + exit 1 } -} else { - Write-Host "Package creation failed. No .nupkg file found." + Write-Host "Tag is on branch '$releaseBranch'" -ForegroundColor Green +} + +# Extract target framework from csproj (needed for Docker image) +[xml]$csproj = Get-Content $csprojPath +$targetFramework = ($csproj.Project.PropertyGroup | + Where-Object { $_.TargetFramework } | + Select-Object -First 1).TargetFramework + +if (-not $targetFramework) { + # Try TargetFrameworks (plural) for multi-target projects, take first one + $targetFrameworks = ($csproj.Project.PropertyGroup | + Where-Object { $_.TargetFrameworks } | + Select-Object -First 1).TargetFrameworks + if ($targetFrameworks) { + $targetFramework = ($targetFrameworks -split ';')[0] + } +} + +if (-not $targetFramework) { + Write-Error "TargetFramework not found in $csprojPath" exit 1 } + +# Convert "net8.0" to "8.0" for Docker image tag +$dotnetVersion = $targetFramework -replace '^net', '' +Write-Host "Target framework: $targetFramework (Docker SDK: $dotnetVersion)" + +# ============================================================================== +# CHANGELOG VALIDATION +# ============================================================================== + +$tag = "v$version" +$releaseName = "Release $version" + +Start-Step "Validating CHANGELOG.md" + +if (-not (Test-Path $changelogPath)) { + Complete-Step "FAIL" + Write-Error "CHANGELOG.md not found. Run .\Generate-Changelog.ps1 first." 
+  exit 1
+}
+
+$changelog = Get-Content $changelogPath -Raw
+$pattern = "(?ms)^##\s+v$([regex]::Escape($version))\b.*?(?=^##\s+v\d+\.\d+|\Z)"
+$match = [regex]::Match($changelog, $pattern)
+
+if (-not $match.Success) {
+  Complete-Step "FAIL"
+  Write-Host ""
+  Write-Host "No CHANGELOG entry for v$version" -ForegroundColor Red
+  Write-Host "Run: .\Generate-Changelog.ps1" -ForegroundColor Yellow
+  exit 1
+}
+
+$releaseNotes = $match.Value.Trim()
+Complete-Step "OK"
+Write-Host ""
+Write-Host "Release notes (v$version):" -ForegroundColor Gray
+Write-Host $releaseNotes
+
+# ==============================================================================
+# NUGET VERSION CHECK
+# ==============================================================================
+
+Start-Step "Checking if already released"
+$packageName = "MaksIT.Core"
+$nugetCheckUrl = "https://api.nuget.org/v3-flatcontainer/$($packageName.ToLower())/index.json"
+
+try {
+  $existingVersions = (Invoke-RestMethod -Uri $nugetCheckUrl -ErrorAction Stop).versions
+  if ($existingVersions -contains $version) {
+    Complete-Step "SKIP"
+    Write-Host ""
+    Write-Host "Version $version already released on NuGet.org" -ForegroundColor Green
+    Write-Host "Nothing to do. To release a new version:" -ForegroundColor Gray
+    Write-Host " 1. Update version in csproj" -ForegroundColor Gray
+    Write-Host " 2. Run Generate-Changelog.ps1" -ForegroundColor Gray
+    Write-Host " 3. Commit and tag: git tag v{new-version}" -ForegroundColor Gray
+    Write-Host " 4. Run this script again" -ForegroundColor Gray
+    exit 0
+  }
+  Write-Host " Version $version not yet on NuGet.org - will release" -ForegroundColor Green
+  Complete-Step "OK"
+}
+catch {
+  Write-Host " Could not check NuGet (will continue anyway): $_" -ForegroundColor Yellow
+  Complete-Step "SKIP"
+}
+
+# ==============================================================================
+# REPOSITORY DETECTION
+# ==============================================================================
+
+# Get remote URL and extract owner/repo
+$remoteUrl = git config --get remote.origin.url
+if ($LASTEXITCODE -ne 0 -or -not $remoteUrl) {
+  Write-Error "Could not determine git remote origin URL."
+  exit 1
+}
+
+# Extract owner/repo from URL (supports HTTPS and SSH)
+if ($remoteUrl -match "[:/](?<owner>[^/]+)/(?<repo>[^/.]+)(\.git)?$") {
+  $owner = $matches['owner']
+  $repoName = $matches['repo']
+  $gitHubRepo = "$owner/$repoName"
+}
+else {
+  Write-Error "Could not parse GitHub repo from remote URL: $remoteUrl"
+  exit 1
+}
+
+Write-Host "Repository detected: $gitHubRepo"
+
+# Ensure GH_TOKEN is set
+if (-not $env:GH_TOKEN) {
+  Write-Error "GitHub token not found. Set environment variable: $($envVars.githubToken)"
+  exit 1
+}
+
+# Test GitHub CLI authentication
+Write-Host "Authenticating GitHub CLI using GH_TOKEN..."
+$authTest = gh api user 2>$null
+
+if ($LASTEXITCODE -ne 0 -or -not $authTest) {
+  Write-Error "GitHub CLI authentication failed. GH_TOKEN may be invalid or missing repo scope."
+  exit 1
+}
+
+Write-Host "GitHub CLI authenticated successfully via GH_TOKEN."
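+
+<#
+  Illustration only (not executed): both common remote URL forms resolve to "owner/repo"
+  in the detection above, e.g.:
+
+    https://github.com/MAKS-IT-COM/maksit-core.git   ->  MAKS-IT-COM/maksit-core
+    git@github.com:MAKS-IT-COM/maksit-core.git       ->  MAKS-IT-COM/maksit-core
+#>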
+ +# ============================================================================== +# BUILD & TEST PHASE +# ============================================================================== + +Start-Step "Cleaning previous builds" +# Use direct folder deletion instead of dotnet clean (avoids package resolution issues) +$foldersToClean = @( + "$projectDir\bin", + "$projectDir\obj", + "$testProjectDir\bin", + "$testProjectDir\obj" +) +foreach ($folder in $foldersToClean) { + if (Test-Path $folder) { + Remove-Item -Path $folder -Recurse -Force -ErrorAction SilentlyContinue + Write-Host " Removed: $folder" + } +} + +Complete-Step "OK" + +Start-Step "Restoring NuGet packages" +dotnet restore $solutionDir\MaksIT.Core.sln --nologo -v q + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Error "NuGet restore failed. Check your internet connection or run 'dotnet nuget locals all --clear' and try again." + exit 1 +} + +Complete-Step "OK" + +# ============================================================================== +# SECURITY SCAN +# ============================================================================== + +Start-Step "Scanning for vulnerable packages" +$vulnerabilityOutput = dotnet list $solutionDir\MaksIT.Core.sln package --vulnerable 2>&1 | Out-String + +# Check if vulnerabilities were found +$hasVulnerabilities = $vulnerabilityOutput -match "has the following vulnerable packages" + +if ($hasVulnerabilities) { + Write-Host $vulnerabilityOutput -ForegroundColor Yellow + if ($failOnVulnerabilities -and -not $DryRun) { + Complete-Step "FAIL" + Write-Error "Vulnerable packages detected. Update packages or set `$failOnVulnerabilities = `$false to bypass." + exit 1 + } + else { + Write-Host " WARNING: Vulnerable packages found (bypassed)" -ForegroundColor Yellow + Complete-Step "WARN" + } +} +else { + Write-Host " No known vulnerabilities found" -ForegroundColor Green + Complete-Step "OK" +} + +# ============================================================================== +# WINDOWS BUILD & TEST +# ============================================================================== + +Start-Step "Building main project (Windows)" +dotnet build $projectDir -c Release --nologo -v q --no-restore + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Error "Main project build failed." + exit 1 +} + +Complete-Step "OK" + +Start-Step "Building test project (Windows)" +dotnet build $testProjectDir -c Release --nologo -v q --no-restore + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Error "Test project build failed." + exit 1 +} + +Complete-Step "OK" + +Start-Step "Running Windows tests with coverage" + +# Create test results directory +if (-not (Test-Path $testResultsDir)) { + New-Item -ItemType Directory -Path $testResultsDir -Force | Out-Null +} + +# Run tests with TRX logger and coverage collection +$windowsTestResultFile = "$testResultsDir\Windows-TestResults.trx" +$testOutput = dotnet test $testProjectDir -c Release --nologo -v q --no-build ` + --logger "trx;LogFileName=$windowsTestResultFile" ` + --collect:"XPlat Code Coverage" ` + --results-directory "$testResultsDir" 2>&1 | Out-String + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Host $testOutput + Write-Error "Windows tests failed. Aborting release process." 
+ exit 1 +} + +# Parse test results +if ($testOutput -match "Passed:\s*(\d+)") { $script:windowsTestsPassed = [int]$Matches[1] } else { $script:windowsTestsPassed = 0 } +if ($testOutput -match "Failed:\s*(\d+)") { $script:windowsTestsFailed = [int]$Matches[1] } else { $script:windowsTestsFailed = 0 } +if ($testOutput -match "Skipped:\s*(\d+)") { $script:windowsTestsSkipped = [int]$Matches[1] } else { $script:windowsTestsSkipped = 0 } +$script:windowsTestsTotal = $script:windowsTestsPassed + $script:windowsTestsFailed + $script:windowsTestsSkipped + +Write-Host " Tests: $script:windowsTestsPassed passed, $script:windowsTestsFailed failed, $script:windowsTestsSkipped skipped" -ForegroundColor Green +Write-Host " Results: $windowsTestResultFile" -ForegroundColor Gray +Complete-Step "OK" + +# ============================================================================== +# CODE COVERAGE CHECK +# ============================================================================== + +Start-Step "Analyzing code coverage" + +# Find the coverage file (Coverlet creates it in a GUID folder) +$coverageFile = Get-ChildItem -Path $testResultsDir -Filter "coverage.cobertura.xml" -Recurse | + Sort-Object LastWriteTime -Descending | + Select-Object -First 1 + +if ($coverageFile) { + # Parse coverage from Cobertura XML + [xml]$coverageXml = Get-Content $coverageFile.FullName + $lineRate = [double]$coverageXml.coverage.'line-rate' * 100 + $branchRate = [double]$coverageXml.coverage.'branch-rate' * 100 + $script:codeCoverage = [math]::Round($lineRate, 2) + $script:branchCoverage = [math]::Round($branchRate, 2) + + Write-Host " Line coverage: $script:codeCoverage%" -ForegroundColor $(if ($script:codeCoverage -ge $coverageThreshold) { "Green" } else { "Yellow" }) + Write-Host " Branch coverage: $script:branchCoverage%" -ForegroundColor Gray + Write-Host " Report: $($coverageFile.FullName)" -ForegroundColor Gray + + # Check threshold + if ($coverageThreshold -gt 0 -and $script:codeCoverage -lt $coverageThreshold) { + Complete-Step "FAIL" + Write-Error "Code coverage ($script:codeCoverage%) is below threshold ($coverageThreshold%)." + exit 1 + } + Complete-Step "OK" +} +else { + Write-Host " Coverage file not found (coverlet may not be installed)" -ForegroundColor Yellow + $script:codeCoverage = 0 + $script:branchCoverage = 0 + Complete-Step "SKIP" +} + +# ============================================================================== +# LINUX BUILD & TEST (Docker) +# ============================================================================== + +Start-Step "Checking Docker availability" +docker info 2>&1 | Out-Null +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Error "Docker is not running. Start Docker Desktop and try again." 
+ exit 1 +} + +# Extract Docker version info +$dockerVersion = docker version --format '{{.Server.Version}}' 2>$null +$dockerOS = docker version --format '{{.Server.Os}}' 2>$null +Write-Host " Docker: $dockerVersion ($dockerOS)" +Complete-Step "OK" + +# Convert Windows path to Docker-compatible path +$dockerRepoPath = $repoRoot -replace '\\', '/' -replace '^([A-Za-z]):', '/$1' + +# Build Docker image name from target framework +$dockerImage = "mcr.microsoft.com/dotnet/sdk:$dotnetVersion" + +Start-Step "Building & testing in Linux ($dockerImage)" +# Build main project, then test project, then run tests with TRX logger - all in one container run +$linuxTestResultFile = "TestResults/Linux-TestResults.trx" +$dockerTestOutput = docker run --rm -v "${dockerRepoPath}:/src" -w /src $dockerImage ` + sh -c "dotnet build src/MaksIT.Core -c Release --nologo -v q && dotnet build src/MaksIT.Core.Tests -c Release --nologo -v q && dotnet test src/MaksIT.Core.Tests -c Release --nologo -v q --no-build --logger 'trx;LogFileName=/src/$linuxTestResultFile'" 2>&1 | Out-String + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Host $dockerTestOutput + Write-Error "Linux build/tests failed. Aborting release process." + exit 1 +} + +# Parse Docker test results +if ($dockerTestOutput -match "Passed:\s*(\d+)") { $script:linuxTestsPassed = [int]$Matches[1] } else { $script:linuxTestsPassed = 0 } +if ($dockerTestOutput -match "Failed:\s*(\d+)") { $script:linuxTestsFailed = [int]$Matches[1] } else { $script:linuxTestsFailed = 0 } +if ($dockerTestOutput -match "Skipped:\s*(\d+)") { $script:linuxTestsSkipped = [int]$Matches[1] } else { $script:linuxTestsSkipped = 0 } +$script:linuxTestsTotal = $script:linuxTestsPassed + $script:linuxTestsFailed + $script:linuxTestsSkipped + +Write-Host " Tests: $script:linuxTestsPassed passed, $script:linuxTestsFailed failed, $script:linuxTestsSkipped skipped" -ForegroundColor Green +Complete-Step "OK" + +# Clean up test results directory +if (Test-Path $testResultsDir) { + Remove-Item -Path $testResultsDir -Recurse -Force -ErrorAction SilentlyContinue +} + +# ============================================================================== +# PACK (rebuild for Windows after Docker overwrote bin/obj) +# ============================================================================== + +Start-Step "Rebuilding for package (Windows)" +# Docker tests may have overwritten bin/obj with Linux artifacts, rebuild for Windows +dotnet build $projectDir -c Release --nologo -v q + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Error "Rebuild for packaging failed." + exit 1 +} + +Complete-Step "OK" + +Start-Step "Creating NuGet package" +dotnet pack $projectDir -c Release --no-build --nologo -v q + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Error "dotnet pack failed." + exit 1 +} + +# Look for the .nupkg and .snupkg files +$packageFile = Get-ChildItem -Path $outputDir -Filter "*.nupkg" -Recurse | Sort-Object LastWriteTime -Descending | Select-Object -First 1 +$symbolsFile = Get-ChildItem -Path $outputDir -Filter "*.snupkg" -Recurse | Sort-Object LastWriteTime -Descending | Select-Object -First 1 + +if (-not $packageFile) { + Complete-Step "FAIL" + Write-Error "Package creation failed. No .nupkg file found." 
+ exit 1 +} + +# Get package size +$packageSize = "{0:N2} KB" -f ($packageFile.Length / 1KB) +Write-Host " Package: $($packageFile.Name) ($packageSize)" +if ($symbolsFile) { + $symbolsSize = "{0:N2} KB" -f ($symbolsFile.Length / 1KB) + Write-Host " Symbols: $($symbolsFile.Name) ($symbolsSize)" +} + +Complete-Step "OK" + +# ============================================================================== +# DRY RUN SUMMARY / CONFIRMATION PROMPT +# ============================================================================== + +if ($DryRun) { + # Show timing summary + Show-TimingSummary + + Write-Host "" + Write-Host "==========================================" + Write-Host "DRY RUN COMPLETE - v$version" + Write-Host "==========================================" + Write-Host "" + Write-Host "Validation Results:" + Write-Host " [OK] Prerequisites (dotnet, git, gh, docker)" + Write-Host " [OK] Working directory clean" -ForegroundColor Green + Write-Host " [OK] Tag $tag on branch '$releaseBranch'" -ForegroundColor Green + Write-Host " [OK] GitHub CLI authenticated" -ForegroundColor Green + if ($hasVulnerabilities) { + Write-Host " [WARN] Vulnerable packages found (review recommended)" -ForegroundColor Yellow + } + else { + Write-Host " [OK] No vulnerable packages" -ForegroundColor Green + } + Write-Host "" + Write-Host "Build Information:" + Write-Host " Target framework: $targetFramework" + Write-Host " Package: $($packageFile.Name) ($packageSize)" + Write-Host " Release commit: $shortCommit (tag $tag)" -ForegroundColor Green + Write-Host "" + Write-Host "Test Results:" + Write-Host " Windows: $script:windowsTestsPassed passed, $script:windowsTestsFailed failed, $script:windowsTestsSkipped skipped" -ForegroundColor Green + Write-Host " Linux: $script:linuxTestsPassed passed, $script:linuxTestsFailed failed, $script:linuxTestsSkipped skipped" -ForegroundColor Green + $totalTests = $script:windowsTestsTotal + $script:linuxTestsTotal + Write-Host " Total: $totalTests tests across 2 platforms" -ForegroundColor Cyan + if ($script:codeCoverage -gt 0) { + $coverageColor = if ($coverageThreshold -gt 0 -and $script:codeCoverage -ge $coverageThreshold) { "Green" } elseif ($coverageThreshold -gt 0) { "Yellow" } else { "Cyan" } + Write-Host " Coverage: $script:codeCoverage% line, $script:branchCoverage% branch" -ForegroundColor $coverageColor + if ($coverageThreshold -gt 0) { + Write-Host " Threshold: $coverageThreshold% ($(if ($script:codeCoverage -ge $coverageThreshold) { 'PASSED' } else { 'FAILED' }))" -ForegroundColor $coverageColor + } + } + Write-Host "" + Write-Host "Pending Features:" + if ($packageSigningEnabled -and $packageSigningCertPath -and (Test-Path $packageSigningCertPath)) { + Write-Host " [READY] Package Signing - Certificate configured" -ForegroundColor Green + } + else { + Write-Host " [TODO] Package Signing - Enable in scriptsettings.json" -ForegroundColor DarkGray + } + if ($emailEnabled -and $emailSmtpServer -and $emailFrom -and $emailTo) { + Write-Host " [READY] Email Notification - SMTP configured" -ForegroundColor Green + } + else { + Write-Host " [TODO] Email Notification - Enable in scriptsettings.json" -ForegroundColor DarkGray + } + if ($coverageThreshold -gt 0) { + Write-Host " [ACTIVE] Code Coverage - Threshold: $coverageThreshold%" -ForegroundColor Green + } + else { + Write-Host " [INFO] Code Coverage - Collected but no threshold set" -ForegroundColor DarkGray + } + Write-Host "" + Write-Host "If this were a real release, it would:" + Write-Host " 1. 
Push $($packageFile.Name) to NuGet.org" + Write-Host " 2. Push tag $tag to remote (if not there)" + Write-Host " 3. Create GitHub release with assets" + Write-Host "" + Write-Host "Run without -DryRun to perform the actual release." -ForegroundColor Green + exit 0 +} + +Write-Host "" +Write-Host "==========================================" +Write-Host "Ready to release v$version" +Write-Host "==========================================" +Write-Host "This will:" +Write-Host " 1. Push package to NuGet.org" +Write-Host " 2. Create GitHub release with tag v$version" +Write-Host "" +$confirm = Read-Host "Proceed with release? (y/n)" +if ($confirm -ne 'y' -and $confirm -ne 'Y') { + Write-Host "Release cancelled." + exit 0 +} + +# ============================================================================== +# NUGET PUBLISH +# ============================================================================== + +Start-Step "Pushing to NuGet.org" +dotnet nuget push $packageFile.FullName -k $nugetApiKey -s $nugetSource --skip-duplicate + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Error "Failed to push the package to NuGet." + exit 1 +} + +Complete-Step "OK" + +# ============================================================================== +# GITHUB RELEASE +# ============================================================================== + +Start-Step "Creating GitHub release" +Write-Host " Tag: $tag -> $shortCommit" + +# Check if GitHub release already exists (re-release scenario) +$existingRelease = gh release list --repo $gitHubRepo | Select-String "^$tag\s" +if ($existingRelease) { + Write-Host " Deleting existing GitHub release for re-release..." -ForegroundColor Yellow + gh release delete $tag --repo $gitHubRepo --yes + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to delete existing release $tag." + exit 1 + } +} + +# Push tag to remote if not already there +$remoteTag = git ls-remote --tags origin $tag 2>$null +if (-not $remoteTag) { + Write-Host " Pushing tag to remote..." + git push origin $tag + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to push git tag." + exit 1 + } +} +else { + Write-Host " Tag already on remote" +} + +# Build release assets list +$releaseAssets = @($packageFile.FullName) +if ($symbolsFile) { + $releaseAssets += $symbolsFile.FullName +} + +# Create GitHub release with assets +Write-Host "Creating GitHub release: $releaseName" + +gh release create $tag @releaseAssets ` + --repo $gitHubRepo ` + --title "$releaseName" ` + --notes "$releaseNotes" + +if ($LASTEXITCODE -ne 0) { + Complete-Step "FAIL" + Write-Error "Failed to create GitHub release for tag $tag." + exit 1 +} + +Complete-Step "OK" + +# ============================================================================== +# COMPLETION +# ============================================================================== + +# Show timing summary +Show-TimingSummary + +Write-Host "" +Write-Host "==========================================" +Write-Host "RELEASE COMPLETED SUCCESSFULLY!" 
-ForegroundColor Green +Write-Host "==========================================" +Write-Host "" +Write-Host "Version: $version" +Write-Host "Package: $($packageFile.Name) ($packageSize)" +Write-Host "" +Write-Host "Test Results:" +Write-Host " Windows: $script:windowsTestsPassed passed" -ForegroundColor Green +Write-Host " Linux: $script:linuxTestsPassed passed" -ForegroundColor Green +if ($script:codeCoverage -gt 0) { + Write-Host " Coverage: $script:codeCoverage% line, $script:branchCoverage% branch" -ForegroundColor Cyan +} + +Write-Host "" +Write-Host "Links:" +Write-Host " NuGet: https://www.nuget.org/packages/MaksIT.Core/$version" +Write-Host " GitHub: https://github.com/$gitHubRepo/releases/tag/$tag" +Write-Host "" + +# ============================================================================== +# PACKAGE SIGNING (TODO) +# ============================================================================== + +if ($packageSigningEnabled -and $packageSigningCertPath -and (Test-Path $packageSigningCertPath)) { + Start-Step "Signing NuGet package" + try { + $signArgs = @( + "nuget", "sign", $packageFile.FullName, + "--certificate-path", $packageSigningCertPath, + "--timestamper", $packageSigningTimestamper + ) + if ($packageSigningCertPassword) { + $signArgs += "--certificate-password" + $signArgs += $packageSigningCertPassword + } + & dotnet @signArgs + if ($LASTEXITCODE -eq 0) { + Write-Host " Package signed successfully" -ForegroundColor Green + Complete-Step "OK" + } + else { + Write-Host " Package signing failed (continuing without signature)" -ForegroundColor Yellow + Complete-Step "WARN" + } + } + catch { + Write-Host " Package signing error: $_" -ForegroundColor Yellow + Complete-Step "WARN" + } +} +else { + Write-Host "" + Write-Host "[TODO] Package Signing - Not configured" -ForegroundColor DarkGray + Write-Host " Set packageSigning.enabled = true in scriptsettings.json" -ForegroundColor DarkGray +} + +# ============================================================================== +# EMAIL NOTIFICATION (TODO) +# ============================================================================== + +if ($emailEnabled -and $emailSmtpServer -and $emailFrom -and $emailTo) { + Start-Step "Sending email notification" + try { + $emailBody = @" +MaksIT.Core Release $version completed successfully. 
+ +Package: $($packageFile.Name) +NuGet: https://www.nuget.org/packages/MaksIT.Core/$version +GitHub: https://github.com/$gitHubRepo/releases/tag/$tag + +Test Results: +- Windows: $script:windowsTestsPassed passed +- Linux: $script:linuxTestsPassed passed +"@ + $emailParams = @{ + From = $emailFrom + To = $emailTo + Subject = "MaksIT.Core v$version Released" + Body = $emailBody + SmtpServer = $emailSmtpServer + Port = $emailSmtpPort + UseSsl = $emailUseSsl + } + + # Add credentials if SMTP password is set + if ($smtpPassword) { + $securePassword = ConvertTo-SecureString $smtpPassword -AsPlainText -Force + $credential = New-Object System.Management.Automation.PSCredential($emailFrom, $securePassword) + $emailParams.Credential = $credential + } + + Send-MailMessage @emailParams + Write-Host " Email sent to $emailTo" -ForegroundColor Green + Complete-Step "OK" + } + catch { + Write-Host " Email sending failed: $_" -ForegroundColor Yellow + Complete-Step "WARN" + } +} +else { + Write-Host "" + Write-Host "[TODO] Email Notification - Not configured" -ForegroundColor DarkGray + Write-Host " Set emailNotification.enabled = true in scriptsettings.json" -ForegroundColor DarkGray +} + +# ============================================================================== +# CODE COVERAGE STATUS +# ============================================================================== + +Write-Host "" +if ($script:codeCoverage -gt 0) { + if ($coverageThreshold -gt 0) { + Write-Host "[ACTIVE] Code Coverage: $script:codeCoverage% (threshold: $coverageThreshold%)" -ForegroundColor Green + } + else { + Write-Host "[INFO] Code Coverage: $script:codeCoverage% (no threshold enforced)" -ForegroundColor Cyan + Write-Host " Set `$coverageThreshold > 0 to enforce minimum coverage" -ForegroundColor DarkGray + } +} +else { + Write-Host "[SKIP] Code Coverage - Not collected" -ForegroundColor DarkGray + Write-Host " Ensure coverlet.collector is installed in test project" -ForegroundColor DarkGray +} + +Write-Host "" + +# Open release pages in browser +Write-Host "Opening release pages in browser..." +Start-Process "https://www.nuget.org/packages/MaksIT.Core/$version" +Start-Process "https://github.com/$gitHubRepo/releases/tag/$tag" diff --git a/src/Release-NuGetPackage.sh b/src/Release-NuGetPackage.sh deleted file mode 100644 index 25e6691..0000000 --- a/src/Release-NuGetPackage.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh - -# Retrieve the API key from the environment variable -apiKey=$NUGET_MAKS_IT -if [ -z "$apiKey" ]; then - echo "Error: API key not found in environment variable NUGET_MAKS_IT." - exit 1 -fi - -# NuGet source -nugetSource="https://api.nuget.org/v3/index.json" - -# Define paths -scriptDir=$(dirname "$0") -solutionDir=$(realpath "$scriptDir") -projectDir="$solutionDir/MaksIT.Core" -outputDir="$projectDir/bin/Release" - -# Clean previous builds -echo "Cleaning previous builds..." -dotnet clean "$projectDir" -c Release - -# Build the project -echo "Building the project..." -dotnet build "$projectDir" -c Release - -# Pack the NuGet package -echo "Packing the project..." -dotnet pack "$projectDir" -c Release --no-build - -# Look for the .nupkg file -packageFile=$(find "$outputDir" -name "*.nupkg" -print0 | xargs -0 ls -t | head -n 1) - -if [ -n "$packageFile" ]; then - echo "Package created successfully: $packageFile" - - # Push the package to NuGet - echo "Pushing the package to NuGet..." - dotnet nuget push "$packageFile" -k "$apiKey" -s "$nugetSource" --skip-duplicate - - if [ $? 
-eq 0 ]; then - echo "Package pushed successfully." - else - echo "Failed to push the package." - fi -else - echo "Package creation failed. No .nupkg file found." - exit 1 -fi diff --git a/src/changelogsettings.json b/src/changelogsettings.json new file mode 100644 index 0000000..424cc93 --- /dev/null +++ b/src/changelogsettings.json @@ -0,0 +1,105 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$comment": "Configuration for Generate-Changelog.ps1 (AI-assisted changelog generation)", + + "ollama": { + "enabled": true, + "apiUrl": "http://localhost:11434", + "defaultTimeout": 0, + "defaultContextWindow": 0 + }, + + "changelog": { + "debug": true, + "enableRAG": true, + "similarityThreshold": 0.65, + + "csprojPath": "MaksIT.Core/MaksIT.Core.csproj", + "outputFile": "../CHANGELOG.md", + "licensePath": "../LICENSE.md", + "fileExtension": ".cs", + "excludePatterns": ["Tests:", "Tests.cs", ".Tests."], + + "models": { + "analyze": { + "name": "qwen2.5-coder:7b-instruct-q6_K", + "context": 0, + "maxTokens": 0, + "description": "Pass 1: Code commit analysis (7B, fast)" + }, + "reason": { + "name": "qwen2.5:7b-instruct-q8_0", + "context": 0, + "maxTokens": 0, + "temperature": 0.1, + "description": "Pass 2: Consolidation (7B, fast)" + }, + "write": { + "name": "qwen2.5:7b-instruct-q8_0", + "context": 0, + "maxTokens": 0, + "description": "Pass 3: Formatting (7B, fast)" + }, + "embed": { + "name": "mxbai-embed-large", + "description": "RAG: Commit clustering" + } + }, + + "prompts": { + "analyze": [ + "Convert code changes to changelog entries. Include ALL items.", + "", + "Changes:", + "{{changes}}", + "", + "RULES:", + "1. Create ONE bullet point per item", + "2. Include method names mentioned (CreateMutex, ResolveFolderPath, etc.)", + "3. New classes = \"Added [class] for [purpose]\"", + "4. New methods = \"Added [method] to [class]\"", + "5. Deleted files = \"Removed [class/feature]\"", + "6. Exception handling = \"Improved error handling in [class]\"", + "", + "Output bullet points for each change:" + ], + + "reason": [ + "Keep all important details from this changelog.", + "", + "Input:", + "{{input}}", + "", + "RULES:", + "1. KEEP specific method names and class names", + "2. KEEP all distinct features - do not over-consolidate", + "3. Merge ONLY if items are nearly identical", + "4. DO NOT invent new information", + "5. Output 3-10 bullet points", + "", + "Output:" + ], + + "format": [ + "Categorize these items under the correct changelog headers.", + "", + "Items:", + "{{items}}", + "", + "HEADERS (use exactly as shown):", + "### Added", + "### Changed", + "### Fixed", + "### Removed", + "", + "CATEGORIZATION RULES:", + "- \"Added [class/method]\" -> ### Added", + "- \"Improved...\" or \"Enhanced...\" -> ### Changed", + "- \"Fixed...\" -> ### Fixed", + "- \"Removed...\" -> ### Removed", + "", + "Output each item under correct header. Omit empty sections:" + ] + } + } +} diff --git a/src/scriptsettings.json b/src/scriptsettings.json new file mode 100644 index 0000000..f61066c --- /dev/null +++ b/src/scriptsettings.json @@ -0,0 +1,41 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$comment": "Configuration for Release-NuGetPackage.ps1. Secrets are stored in environment variables, not here.", + + "release": { + "branch": "main", + "$comment": "Tag must be on this branch to release. Set to empty string to allow any branch." 
+ }, + + "paths": { + "changelogPath": "../CHANGELOG.md" + }, + + "environmentVariables": { + "$comment": "Required environment variables (store secrets here, not in this file)", + "nugetApiKey": "NUGET_MAKS_IT", + "githubToken": "GITHUB_MAKS_IT_COM", + "signingCertPassword": "SIGNING_CERT_PASSWORD", + "smtpPassword": "SMTP_PASSWORD" + }, + + "qualityGates": { + "coverageThreshold": 0, + "failOnVulnerabilities": true + }, + + "packageSigning": { + "enabled": false, + "certificatePath": "", + "timestampServer": "http://timestamp.digicert.com" + }, + + "emailNotification": { + "enabled": false, + "smtpServer": "", + "smtpPort": 587, + "useSsl": true, + "from": "", + "to": "" + } +}