Initial version

This commit is contained in:
2026-02-27 21:12:55 +01:00
parent d2bbbd8bc7
commit a73beb6ed5
184 changed files with 90370 additions and 63 deletions

27
.gitignore vendored
View File

@@ -2,7 +2,7 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
@@ -83,8 +83,6 @@ StyleCopReport.xml
*.pgc
*.pgd
*.rsp
# but not Directory.Build.rsp, as it configures directory-level build defaults
!Directory.Build.rsp
*.sbr
*.tlb
*.tli
@@ -209,6 +207,9 @@ PublishScripts/
*.nuget.props
*.nuget.targets
# Nuget personal access tokens and Credentials
nuget.config
# Microsoft Azure Build Output
csx/
*.build.csdef
@@ -297,17 +298,6 @@ node_modules/
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
@@ -364,9 +354,6 @@ ASALocalRun/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
@@ -398,6 +385,7 @@ FodyWeavers.xsd
*.msp
# JetBrains Rider
.idea/
*.sln.iml
# ---> VisualStudioCode
@@ -406,11 +394,8 @@ FodyWeavers.xsd
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
*.code-workspace
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix

View File

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\EonaCat.LogStack\EonaCat.LogStack.csproj" />
</ItemGroup>
</Project>

31
ConsoleApp1/Program.cs Normal file
View File

@@ -0,0 +1,31 @@
using EonaCat.LogStack.Configuration;
using EonaCat.LogStack.Core;
// Demo/stress harness for EonaCat.LogStack: console + file flows, emitting an
// info/error pair in a tight loop until the user interrupts with Ctrl+C.
var logger = new LogBuilder("MyApp")
    .WithMinimumLevel(LogLevel.Information)
    .WriteToConsole()
    .WriteToFile("C:\\tesss", maxFileSize: 50 * 1024 * 1024)
    // Other flows kept for quick manual testing:
    //.WriteToJsonFile("./logs", maxFileSize: 50 * 1024 * 1024)
    //.WriteToHttp("https://127.0.0.1")
    //.WriteToUdp("127.0.0.1", 514)
    //.WriteToTcp("127.0.0.1", 514)
    //.WriteToDatabase(null)
    //.WriteToDiscord("https://discord.com/api/webhooks/...")
    //.WriteToMicrosoftTeams("https://outlook.office.com/webhook/...")
    //.WriteToElasticSearch("http://localhost:9200/logs")
    //.WriteToGraylogFlow(null)
    //.WriteToZabbixFlow(null)
    .BoostWithCorrelationId()
    .BoostWithProcessId()
    .Build();

// BUG FIX: the loop was `while (true)`, which made the DisposeAsync call below
// unreachable, so the logger was never flushed. Stop cleanly on Ctrl+C instead.
using var cts = new CancellationTokenSource();
Console.CancelKeyPress += (_, e) =>
{
    e.Cancel = true; // keep the process alive long enough to dispose the logger
    cts.Cancel();
};

while (!cts.IsCancellationRequested)
{
    logger.Information("Application started");
    logger.Error(new Exception("Nerd!"), "Something went wrong");
    try
    {
        await Task.Delay(1, cts.Token);
    }
    catch (OperationCanceledException)
    {
        break;
    }
}

await logger.DisposeAsync(); // Flushes all logs

View File

@@ -0,0 +1,39 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
<PackageId>EonaCat.LogStack.LogClient</PackageId>
<Version>0.0.1</Version>
<Authors>EonaCat (Jeroen Saey)</Authors>
<Description>Logging client for the EonaCat Logger LogServer LogStack</Description>
<PackageTags>logging;monitoring;analytics;diagnostics</PackageTags>
<Copyright>EonaCat (Jeroen Saey)</Copyright>
<PackageIcon>icon.png</PackageIcon>
<PackageReadmeFile>readme.md</PackageReadmeFile>
<RepositoryUrl>https://git.saey.me/EonaCat/EonaCat.LogStack.LogClient</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageLicenseFile>LICENSE</PackageLicenseFile>
</PropertyGroup>
<ItemGroup>
<None Include="..\EonaCat.LogStack\icon.png">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
<None Include="..\LICENSE">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
</ItemGroup>
<ItemGroup>
<PackageReference Include="System.Net.Http.Json" Version="10.0.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\EonaCat.LogStack\EonaCat.LogStack.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="readme.md">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
</ItemGroup>
</Project>

View File

@@ -0,0 +1,204 @@
using EonaCat.LogStack.LogClient.Models;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.LogClient
{
/// <summary>
/// Buffered HTTP client that ships <see cref="LogEntry"/> batches to a
/// LogCentral server. Entries are queued in memory and flushed either when
/// <see cref="LogCentralOptions.BatchSize"/> is reached or on a periodic timer.
/// </summary>
public class LogCentralClient : IDisposable
{
    private readonly HttpClient _httpClient;
    private readonly LogCentralOptions _options;
    private readonly ConcurrentQueue<LogEntry> _logQueue;
    private readonly Timer _flushTimer;
    private readonly SemaphoreSlim _flushSemaphore;
    private bool _disposed;

    /// <summary>Creates a client for the server described by <paramref name="options"/>.</summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="options"/> is null.</exception>
    public LogCentralClient(LogCentralOptions options)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _httpClient = new HttpClient { BaseAddress = new Uri(_options.ServerUrl) };
        _httpClient.DefaultRequestHeaders.Add("X-API-Key", _options.ApiKey);
        _logQueue = new ConcurrentQueue<LogEntry>();
        _flushSemaphore = new SemaphoreSlim(1, 1);
        _flushTimer = new Timer(async _ =>
        {
            try
            {
                await FlushAsync().ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                // BUG FIX: the timer callback is effectively async void; an
                // unhandled exception here would tear down the process.
                if (_options.EnableFallbackLogging)
                {
                    Console.WriteLine($"[LogCentral] Background flush failed: {ex.Message}");
                }
            }
        },
        null,
        TimeSpan.FromSeconds(_options.FlushIntervalSeconds),
        TimeSpan.FromSeconds(_options.FlushIntervalSeconds));
    }

    /// <summary>
    /// Stamps <paramref name="entry"/> with the configured application metadata
    /// and current UTC time, queues it, and flushes once the batch is full.
    /// </summary>
    public async Task LogAsync(LogEntry entry)
    {
        entry.ApplicationName = _options.ApplicationName;
        entry.ApplicationVersion = _options.ApplicationVersion;
        entry.Environment = _options.Environment;
        entry.Timestamp = DateTime.UtcNow;
        entry.MachineName ??= Environment.MachineName;
        // Simplified from `entry.Category ??= entry.Category ?? "Default"` —
        // the inner coalesce was redundant.
        entry.Category ??= "Default";
        entry.Message ??= "";
        _logQueue.Enqueue(entry);
        if (_logQueue.Count >= _options.BatchSize)
        {
            await FlushAsync().ConfigureAwait(false);
        }
    }

    /// <summary>Logs <paramref name="ex"/> as an Error-level entry with its stack trace.</summary>
    public async Task LogExceptionAsync(Exception ex, string message = "",
        Dictionary<string, object>? properties = null)
    {
        await LogAsync(new LogEntry
        {
            Level = (int)LogLevel.Error,
            Category = "Exception",
            Message = message,
            Exception = ex.ToString(),
            StackTrace = ex.StackTrace,
            Properties = properties
        }).ConfigureAwait(false);
    }

    /// <summary>Logs a security event; the event type is folded into the message.</summary>
    public async Task LogSecurityEventAsync(string eventType, string message,
        Dictionary<string, object>? properties = null)
    {
        await LogAsync(new LogEntry
        {
            Level = (int)LogLevel.Security,
            Category = "Security",
            Message = $"[{eventType}] {message}",
            Properties = properties
        }).ConfigureAwait(false);
    }

    /// <summary>Logs an analytics event named <paramref name="eventName"/>.</summary>
    public async Task LogAnalyticsAsync(string eventName,
        Dictionary<string, object>? properties = null)
    {
        await LogAsync(new LogEntry
        {
            Level = (int)LogLevel.Analytics,
            Category = "Analytics",
            Message = eventName,
            Properties = properties
        }).ConfigureAwait(false);
    }

    /// <summary>
    /// Drains up to one batch from the queue and sends it. Serialized by a
    /// semaphore so the timer and size-triggered flushes never overlap.
    /// </summary>
    private async Task FlushAsync()
    {
        if (_logQueue.IsEmpty)
        {
            return;
        }
        await _flushSemaphore.WaitAsync().ConfigureAwait(false);
        try
        {
            var batch = new List<LogEntry>();
            while (batch.Count < _options.BatchSize && _logQueue.TryDequeue(out var entry))
            {
                batch.Add(entry);
            }
            if (batch.Count > 0)
            {
                await SendBatchAsync(batch).ConfigureAwait(false);
            }
        }
        finally
        {
            _flushSemaphore.Release();
        }
    }

    /// <summary>
    /// POSTs one batch to the server; on failure the entries are re-queued for
    /// the next flush attempt.
    /// </summary>
    private async Task SendBatchAsync(List<LogEntry> entries)
    {
        try
        {
            // Reuse the canonical entity->DTO mapping instead of duplicating
            // the field-by-field copy here.
            var dtos = entries.Select(LogEntry.ToDto).ToList();
            var response = await _httpClient.PostAsJsonAsync("/api/logs/batch", dtos).ConfigureAwait(false);
            response.EnsureSuccessStatusCode();
        }
        catch (Exception ex)
        {
            if (_options.EnableFallbackLogging)
            {
                Console.WriteLine($"[LogCentral] Failed to send logs: {ex.Message}");
            }
            // Best-effort retry: put the batch back. NOTE(review): the queue is
            // unbounded, so a long outage grows memory without limit — consider
            // capping the retry queue.
            foreach (var entry in entries)
            {
                _logQueue.Enqueue(entry);
            }
        }
    }

    /// <summary>Flushes any queued entries, then disposes the client.</summary>
    public async Task FlushAndDisposeAsync()
    {
        await FlushAsync().ConfigureAwait(false);
        Dispose();
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }
        // Stop the timer first so no new background flush starts, then drain
        // synchronously (sync-over-async is tolerated here because Dispose has
        // no async counterpart on IDisposable).
        _flushTimer?.Dispose();
        FlushAsync().GetAwaiter().GetResult();
        _httpClient?.Dispose();
        _flushSemaphore?.Dispose();
        _disposed = true;
        GC.SuppressFinalize(this);
    }
}
/// <summary>
/// Wire-format log record accepted by the LogCentral batch API. Unlike the
/// EF-mapped <c>LogEntry</c>, structured properties travel as a plain
/// dictionary rather than a JSON string column.
/// </summary>
public class LogEntryDto
{
/// <summary>Unique entry id; a fresh GUID string unless the producer sets one.</summary>
public string Id { get; set; } = Guid.NewGuid().ToString();
/// <summary>Event time; the client stamps this with UTC before sending.</summary>
public DateTime Timestamp { get; set; }
public string ApplicationName { get; set; } = default!;
public string ApplicationVersion { get; set; } = default!;
/// <summary>Deployment environment name (e.g. "Production").</summary>
public string Environment { get; set; } = default!;
public string MachineName { get; set; } = default!;
/// <summary>Numeric log level; values correspond to the <c>LogLevel</c> enum.</summary>
public int Level { get; set; }
public string Category { get; set; } = default!;
public string Message { get; set; } = default!;
/// <summary>Full exception text (type + message + stack), if any.</summary>
public string? Exception { get; set; }
public string? StackTrace { get; set; }
/// <summary>Arbitrary structured data attached to the event.</summary>
public Dictionary<string, object>? Properties { get; set; }
public string? UserId { get; set; }
public string? SessionId { get; set; }
public string? RequestId { get; set; }
/// <summary>Id shared across services for distributed-trace correlation.</summary>
public string? CorrelationId { get; set; }
}
}

View File

@@ -0,0 +1,60 @@
using EonaCat.LogStack.Configuration;
using EonaCat.LogStack.LogClient.Models;
using System;
using System.Collections.Generic;
namespace EonaCat.LogStack.LogClient
{
/// <summary>
/// Bridges EonaCat.LogStack into a <see cref="LogCentralClient"/>: every
/// message raised on the builder's OnLog event is forwarded as a LogEntry.
/// Dispose the adapter to unsubscribe.
/// </summary>
public class LogCentralEonaCatAdapter : IDisposable
{
    private readonly LogCentralClient _client;
    private readonly LogBuilder _logBuilder;

    /// <exception cref="ArgumentNullException">When either argument is null.</exception>
    public LogCentralEonaCatAdapter(LogBuilder logBuilder, LogCentralClient client)
    {
        // BUG FIX: the original constructor never assigned _logBuilder, so the
        // OnLog subscription below always threw a NullReferenceException.
        _logBuilder = logBuilder ?? throw new ArgumentNullException(nameof(logBuilder));
        _client = client ?? throw new ArgumentNullException(nameof(client));
        _logBuilder.OnLog += LogSettings_OnLog;
    }

    private void LogSettings_OnLog(object sender, LogMessage e)
    {
        var entry = new LogEntry
        {
            Level = (int)MapLogLevel(e.Level),
            Category = e.Category ?? "General",
            Message = e.Message,
            Properties = new Dictionary<string, object>
            {
                { "Source", e.Origin ?? "Unknown" }
            }
        };
        if (e.Exception != null)
        {
            entry.Exception = e.Exception.ToString();
            entry.StackTrace = e.Exception.StackTrace;
        }
        // Fire-and-forget: event handlers cannot await. Failures are handled
        // (and retried) inside LogCentralClient, so the task is discarded.
        _ = _client.LogAsync(entry);
    }

    /// <summary>Maps LogStack levels onto the client's wire levels; unknown levels become Information.</summary>
    private static LogLevel MapLogLevel(Core.LogLevel logType)
    {
        return logType switch
        {
            Core.LogLevel.Trace => LogLevel.Trace,
            Core.LogLevel.Debug => LogLevel.Debug,
            Core.LogLevel.Information => LogLevel.Information,
            Core.LogLevel.Warning => LogLevel.Warning,
            Core.LogLevel.Error => LogLevel.Error,
            Core.LogLevel.Critical => LogLevel.Critical,
            _ => LogLevel.Information
        };
    }

    public void Dispose()
    {
        // Unsubscribe so the builder does not keep this adapter alive.
        _logBuilder.OnLog -= LogSettings_OnLog;
        GC.SuppressFinalize(this);
    }
}
}

View File

@@ -0,0 +1,18 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace EonaCat.LogStack.LogClient
{
/// <summary>
/// Configuration for <see cref="LogCentralClient"/>: where logs are sent, how
/// the sending application identifies itself, and how batching behaves.
/// </summary>
public class LogCentralOptions
{
    /// <summary>Base URL of the LogCentral server.</summary>
    public string ServerUrl { get; set; } = "http://localhost:5000";

    /// <summary>API key sent in the X-API-Key header on every request.</summary>
    public string ApiKey { get; set; } = string.Empty;

    /// <summary>Name stamped onto every log entry.</summary>
    public string ApplicationName { get; set; } = string.Empty;

    /// <summary>Version stamped onto every log entry.</summary>
    public string ApplicationVersion { get; set; } = "1.0.0";

    /// <summary>Environment name stamped onto every log entry.</summary>
    public string Environment { get; set; } = "Production";

    /// <summary>Number of queued entries that triggers an immediate flush.</summary>
    public int BatchSize { get; set; } = 50;

    /// <summary>Interval of the background flush timer, in seconds.</summary>
    public int FlushIntervalSeconds { get; set; } = 5;

    /// <summary>When true, send failures are reported to the console.</summary>
    public bool EnableFallbackLogging { get; set; } = true;
}
}

View File

@@ -0,0 +1,15 @@
namespace EonaCat.LogStack.LogClient
{
/// <summary>
/// Log severity/kind used on the LogCentral wire format. Values 0-5 mirror the
/// conventional severity ladder; 6-8 are LogCentral-specific event channels.
/// Stored as <c>int</c> on <c>LogEntry.Level</c> — keep values stable.
/// </summary>
public enum LogLevel
{
/// <summary>Most verbose diagnostic output.</summary>
Trace = 0,
/// <summary>Developer diagnostics.</summary>
Debug = 1,
/// <summary>Normal operational messages.</summary>
Information = 2,
/// <summary>Unexpected but recoverable situations.</summary>
Warning = 3,
/// <summary>Failures of an operation.</summary>
Error = 4,
/// <summary>Failures that threaten the whole application.</summary>
Critical = 5,
/// <summary>Traffic/request channel (non-severity event type).</summary>
Traffic = 6,
/// <summary>Security event channel (see LogSecurityEventAsync).</summary>
Security = 7,
/// <summary>Analytics event channel (see LogAnalyticsAsync).</summary>
Analytics = 8
}
}

View File

@@ -0,0 +1,64 @@
using EonaCat.Json;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text;
namespace EonaCat.LogStack.LogClient.Models
{
/// <summary>
/// Persistable log record. Structured properties are stored as a JSON string
/// (<see cref="PropertiesJson"/>, a TEXT column) and exposed as a dictionary
/// through the non-mapped <see cref="Properties"/> view.
/// </summary>
public class LogEntry
{
    public string Id { get; set; } = Guid.NewGuid().ToString();
    public DateTime Timestamp { get; set; }
    public string ApplicationName { get; set; } = default!;
    public string ApplicationVersion { get; set; } = default!;
    public string Environment { get; set; } = default!;
    public string MachineName { get; set; } = default!;
    public int Level { get; set; }
    public string Category { get; set; } = default!;
    public string Message { get; set; } = default!;
    public string? Exception { get; set; }
    public string? StackTrace { get; set; }

    /// <summary>Raw JSON backing store for <see cref="Properties"/>.</summary>
    [Column(TypeName = "TEXT")]
    public string? PropertiesJson { get; set; }

    /// <summary>
    /// Dictionary view over <see cref="PropertiesJson"/>. The getter
    /// deserializes on every read, so mutating the returned dictionary does
    /// NOT persist — assign a new dictionary to update.
    /// </summary>
    [NotMapped]
    public Dictionary<string, object>? Properties
    {
        get
        {
            if (string.IsNullOrEmpty(PropertiesJson))
            {
                return null;
            }
            return JsonHelper.ToObject<Dictionary<string, object>>(PropertiesJson);
        }
        set
        {
            if (value == null)
            {
                PropertiesJson = null;
            }
            else
            {
                PropertiesJson = JsonHelper.ToJson(value);
            }
        }
    }

    public string? UserId { get; set; }
    public string? SessionId { get; set; }
    public string? RequestId { get; set; }
    public string? CorrelationId { get; set; }

    /// <summary>Maps an entity to its wire-format DTO, field for field.</summary>
    public static LogEntryDto ToDto(LogEntry entry)
    {
        var dto = new LogEntryDto
        {
            Id = entry.Id,
            Timestamp = entry.Timestamp,
            ApplicationName = entry.ApplicationName,
            ApplicationVersion = entry.ApplicationVersion,
            Environment = entry.Environment,
            MachineName = entry.MachineName,
            Level = entry.Level,
            Category = entry.Category,
            Message = entry.Message,
            Exception = entry.Exception,
            StackTrace = entry.StackTrace,
            Properties = entry.Properties,
            UserId = entry.UserId,
            SessionId = entry.SessionId,
            RequestId = entry.RequestId,
            CorrelationId = entry.CorrelationId
        };
        return dto;
    }
}
}

View File

@@ -0,0 +1,389 @@
# EonaCat.LogStack.LogClient
### Client Installation
#### Via NuGet Package Manager:
```bash
dotnet add package EonaCat.LogStack.LogClient
```
#### Via Package Manager Console:
```powershell
Install-Package EonaCat.LogStack.LogClient
```
## 📖 Usage Examples
### Basic Setup
```csharp
using EonaCat.LogStack.LogClient;
using EonaCat.LogStack.LogClient.Models;
// Configure the client
var options = new LogCentralOptions
{
ServerUrl = "https://your-logcentral-server.com",
ApiKey = "your-api-key-here",
ApplicationName = "MyAwesomeApp",
ApplicationVersion = "1.0.0",
Environment = "Production",
BatchSize = 50,
FlushIntervalSeconds = 5
};
var logClient = new LogCentralClient(options);
```
### Integration with EonaCat.LogStack
```csharp
using EonaCat.LogStack;
using EonaCat.LogStack.LogClient;
var loggerSettings = new LoggerSettings();
loggerSettings.UseLocalTime = true;
loggerSettings.Id = "TEST";
var logger = new LogManager(loggerSettings);
// Create the adapter
var adapter = new LogCentralEonaCatAdapter(loggerSettings, logClient);
// Now all EonaCat.LogStack logs will be sent to LogCentral automatically
logger.Log("Application started", LogLevel.Info);
logger.Log("User logged in", LogLevel.Info, "Authentication");
```
### Manual Logging
```csharp
// Simple log
await logClient.LogAsync(new LogEntry
{
Level = (int)LogLevel.Information,
Category = "Startup",
Message = "Application started successfully"
});
// Log with properties
await logClient.LogAsync(new LogEntry
{
Level = (int)LogLevel.Information,
Category = "UserAction",
Message = "User performed action",
UserId = "user123",
Properties = new Dictionary<string, object>
{
["Action"] = "Purchase",
["Amount"] = 99.99,
["ProductId"] = "prod-456"
}
});
// Log exception
try
{
// Your code
throw new Exception("Something went wrong");
}
catch (Exception ex)
{
await logClient.LogExceptionAsync(ex, "Error processing order",
new Dictionary<string, object>
{
["OrderId"] = "12345",
["CustomerId"] = "cust-789"
});
}
```
### Security Event Logging
```csharp
await logClient.LogSecurityEventAsync(
"LoginAttempt",
"Failed login attempt detected",
new Dictionary<string, object>
{
["Username"] = "admin",
["IPAddress"] = "192.168.1.100",
["Attempts"] = 5
}
);
await logClient.LogSecurityEventAsync(
"UnauthorizedAccess",
"Unauthorized API access attempt",
new Dictionary<string, object>
{
["Endpoint"] = "/api/admin/users",
["Method"] = "DELETE",
["UserId"] = "user456"
}
);
```
### Analytics Logging
```csharp
// Track user events
await logClient.LogAnalyticsAsync("PageView",
new Dictionary<string, object>
{
["Page"] = "/products/electronics",
["Duration"] = 45.2,
["Source"] = "Google"
}
);
await logClient.LogAnalyticsAsync("Purchase",
new Dictionary<string, object>
{
["ProductId"] = "prod-123",
["Price"] = 299.99,
["Category"] = "Electronics",
["PaymentMethod"] = "CreditCard"
}
);
await logClient.LogAnalyticsAsync("FeatureUsage",
new Dictionary<string, object>
{
["Feature"] = "DarkMode",
["Enabled"] = true,
["Platform"] = "iOS"
}
);
```
### ASP.NET Core Integration
```csharp
// Program.cs or Startup.cs
public class Program
{
public static void Main(string[] args)
{
var builder = WebApplication.CreateBuilder(args);
// Register LogCentral
var logCentralOptions = new LogCentralOptions
{
ServerUrl = builder.Configuration["LogCentral:ServerUrl"],
ApiKey = builder.Configuration["LogCentral:ApiKey"],
ApplicationName = "MyWebApp",
ApplicationVersion = "1.0.0",
Environment = builder.Environment.EnvironmentName
};
var logClient = new LogCentralClient(logCentralOptions);
builder.Services.AddSingleton(logClient);
var app = builder.Build();
// Use middleware to log requests
app.Use(async (context, next) =>
{
var requestId = Guid.NewGuid().ToString();
await logClient.LogAsync(new LogEntry
{
Level = LogLevel.Information,
Category = "HTTP",
Message = $"{context.Request.Method} {context.Request.Path}",
RequestId = requestId,
Properties = new Dictionary<string, object>
{
["Method"] = context.Request.Method,
["Path"] = context.Request.Path.Value,
["QueryString"] = context.Request.QueryString.Value
}
});
await next();
});
app.Run();
}
}
```
### Windows Service / Console App
```csharp
using EonaCat.LogStack.LogClient;
using Microsoft.Extensions.Hosting;
public class Worker : BackgroundService
{
private readonly LogCentralClient _logClient;
public Worker(LogCentralClient logClient)
{
_logClient = logClient;
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
await _logClient.LogAsync(new LogEntry
{
Level = LogLevel.Information,
Category = "Service",
Message = "Worker service started"
});
while (!stoppingToken.IsCancellationRequested)
{
try
{
// Your work here
await Task.Delay(1000, stoppingToken);
}
catch (Exception ex)
{
await _logClient.LogExceptionAsync(ex, "Error in worker");
}
}
await _logClient.FlushAndDisposeAsync();
}
}
```
### WPF / WinForms Application
```csharp
public partial class MainWindow : Window
{
private readonly LogCentralClient _logClient;
public MainWindow()
{
InitializeComponent();
_logClient = new LogCentralClient(new LogCentralOptions
{
ServerUrl = "https://logs.mycompany.com",
ApiKey = "your-api-key",
ApplicationName = "MyDesktopApp",
ApplicationVersion = Assembly.GetExecutingAssembly().GetName().Version.ToString(),
Environment = "Production"
});
Application.Current.DispatcherUnhandledException += OnUnhandledException;
}
private async void OnUnhandledException(object sender, DispatcherUnhandledExceptionEventArgs e)
{
await _logClient.LogExceptionAsync(e.Exception, "Unhandled exception in UI");
e.Handled = true;
}
protected override async void OnClosing(CancelEventArgs e)
{
await _logClient.FlushAndDisposeAsync();
base.OnClosing(e);
}
}
```
## 🎯 Advanced Features
### Correlation IDs for Distributed Tracing
```csharp
var correlationId = Guid.NewGuid().ToString();
await logClient.LogAsync(new LogEntry
{
Level = LogLevel.Information,
Category = "OrderProcessing",
Message = "Order created",
CorrelationId = correlationId,
Properties = new Dictionary<string, object> { ["OrderId"] = "12345" }
});
// In another service
await logClient.LogAsync(new LogEntry
{
Level = LogLevel.Information,
Category = "PaymentProcessing",
Message = "Payment processed",
CorrelationId = correlationId, // Same ID
Properties = new Dictionary<string, object> { ["Amount"] = 99.99 }
});
```
### Performance Monitoring
```csharp
var stopwatch = Stopwatch.StartNew();
try
{
// Your operation
await SomeSlowOperation();
}
finally
{
stopwatch.Stop();
await logClient.LogAsync(new LogEntry
{
Level = LogLevel.Information,
Category = "Performance",
Message = "Operation completed",
Properties = new Dictionary<string, object>
{
["Operation"] = "DatabaseQuery",
["DurationMs"] = stopwatch.ElapsedMilliseconds,
["Status"] = "Success"
}
});
}
```
## 📊 Dashboard Features
- **Real-time monitoring**: Auto-refreshes every 30 seconds
- **Advanced search**: Full-text search across all log fields
- **Filtering**: By application, environment, level, date range
- **Charts**: Visual representation of log levels and trends
- **Export**: Download logs as CSV or JSON
- **Alerts**: Configure notifications for critical events (planned)
## 🔒 Security Best Practices
1. **Use HTTPS** for production deployments
2. **Rotate API keys** regularly
3. **Limit API key permissions** by application
4. **Store API keys** in secure configuration (Azure Key Vault, AWS Secrets Manager)
5. **Enable authentication** for dashboard access (add authentication middleware)
## 🚀 Deployment
### Docker Deployment
```dockerfile
FROM mcr.microsoft.com/dotnet/aspnet:8.0
WORKDIR /app
COPY --from=build /app/publish .
ENTRYPOINT ["dotnet", "EonaCat.LogStack.LogServer.dll"]
```
### Azure Deployment
```bash
az webapp create --resource-group MyResourceGroup --plan MyPlan --name logcentral --runtime "DOTNETCORE:8.0"
az webapp deployment source config-zip --resource-group MyResourceGroup --name logcentral --src logcentral.zip
```
## 📈 Scalability
For high-volume applications:
1. Use **Redis** for caching
2. Implement **Elasticsearch** for faster searches
3. Use **message queues** (RabbitMQ, Azure Service Bus) for async processing
4. Partition database by date ranges
5. Implement log archival and retention policies

View File

@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="OpenTelemetry" Version="1.15.0" />
<PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.15.0" />
<PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.15.0" />
<PackageReference Include="System.Threading.AccessControl" Version="10.0.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\EonaCat.LogStack\EonaCat.LogStack.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,33 @@
using EonaCat.LogStack.Configuration;
using EonaCat.LogStack.Core;
using EonaCat.LogStack.Flows;
using OpenTelemetry.Exporter;
using System;
using System.Collections.Generic;
using System.Text;
namespace EonaCat.LogStack.Flows.WindowsEventLog
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public static class EonaCatLogStackExtensions
{
    /// <summary>
    /// Routes log events to an OpenTelemetry OTLP endpoint by attaching an
    /// <see cref="OpenTelemetryFlow"/> to the builder.
    /// NOTE(review): this file lives in the WindowsEventLog namespace despite
    /// being OpenTelemetry-specific — looks like a copy/paste artifact; confirm.
    /// </summary>
    /// <param name="logBuilder">Builder to extend.</param>
    /// <param name="serviceName">Logical service name reported to the collector.</param>
    /// <param name="endpoint">OTLP collector endpoint.</param>
    /// <param name="protocol">Export transport; defaults to gRPC.</param>
    /// <param name="minimumLevel">Lowest level this flow will forward.</param>
    /// <returns>The same builder, for fluent chaining.</returns>
    public static LogBuilder WriteToOpenTelemetry(this LogBuilder logBuilder,
        string serviceName,
        Uri endpoint,
        OtlpExportProtocol protocol = OtlpExportProtocol.Grpc,
        LogLevel minimumLevel = LogLevel.Trace)
    {
        var flow = new OpenTelemetryFlow(serviceName, endpoint, protocol, minimumLevel);
        logBuilder.AddFlow(flow);
        return logBuilder;
    }
}
}

View File

@@ -0,0 +1,162 @@
using EonaCat.LogStack.Core;
using Microsoft.Extensions.Logging;
using OpenTelemetry.Exporter;
using OpenTelemetry.Logs;
using OpenTelemetry.Resources;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using LogLevel = EonaCat.LogStack.Core.LogLevel;
namespace EonaCat.LogStack.Flows
{
/// <summary>
/// Flow that forwards LogStack events to an OpenTelemetry OTLP exporter via a
/// dedicated Microsoft.Extensions.Logging pipeline. Resource attributes carry
/// the host name and process id; event properties and exception details are
/// attached as structured state.
/// </summary>
public sealed class OpenTelemetryFlow : FlowBase
{
    private readonly ILoggerFactory _loggerFactory;
    private readonly ILogger _logger;

    /// <exception cref="ArgumentNullException">When serviceName is null/blank or endpoint is null.</exception>
    public OpenTelemetryFlow(string serviceName, Uri endpoint, OtlpExportProtocol protocol = OtlpExportProtocol.Grpc, LogLevel minimumLevel = LogLevel.Trace) : base("OpenTelemetry:" + serviceName, minimumLevel)
    {
        if (string.IsNullOrWhiteSpace(serviceName))
        {
            throw new ArgumentNullException(nameof(serviceName));
        }
        if (endpoint == null)
        {
            throw new ArgumentNullException(nameof(endpoint));
        }

        // BUG FIX: Process instances are IDisposable; capture the id once
        // instead of leaking the handle returned by GetCurrentProcess().
        int processId;
        using (var currentProcess = Process.GetCurrentProcess())
        {
            processId = currentProcess.Id;
        }

        _loggerFactory = LoggerFactory.Create(builder =>
        {
            builder.ClearProviders();
            builder.AddOpenTelemetry(options =>
            {
                options.SetResourceBuilder(
                    ResourceBuilder.CreateDefault()
                        .AddService(serviceName)
                        .AddAttributes(new Dictionary<string, object>
                        {
                            ["host.name"] = Environment.MachineName,
                            ["process.id"] = processId
                        }));
                options.AddOtlpExporter(otlp =>
                {
                    otlp.Endpoint = endpoint;
                    otlp.Protocol = protocol;
                });
                options.IncludeScopes = true;
                options.IncludeFormattedMessage = true;
                options.ParseStateValues = true;
            });
        });
        _logger = _loggerFactory.CreateLogger(serviceName);
    }

    /// <summary>Writes one event if the flow is enabled and the level passes the filter.</summary>
    public override Task<WriteResult> BlastAsync(
        LogEvent logEvent,
        CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        WriteLog(logEvent);
        Interlocked.Increment(ref BlastedCount);
        return Task.FromResult(WriteResult.Success);
    }

    /// <summary>Writes a batch, skipping events below the minimum level.</summary>
    public override Task<WriteResult> BlastBatchAsync(
        ReadOnlyMemory<LogEvent> logEvents,
        CancellationToken cancellationToken = default)
    {
        if (!IsEnabled)
        {
            return Task.FromResult(WriteResult.FlowDisabled);
        }
        // NOTE(review): this filters with `e.Level < MinimumLevel` while
        // BlastAsync uses IsLogLevelEnabled — confirm both express the same rule.
        foreach (var e in logEvents.Span)
        {
            if (e.Level < MinimumLevel)
            {
                continue;
            }
            WriteLog(e);
            Interlocked.Increment(ref BlastedCount);
        }
        return Task.FromResult(WriteResult.Success);
    }

    /// <summary>Translates a LogEvent into structured MEL state and emits it.</summary>
    private void WriteLog(LogEvent log)
    {
        var state = new List<KeyValuePair<string, object>>();
        if (!string.IsNullOrEmpty(log.Category))
        {
            state.Add(new KeyValuePair<string, object>("category", log.Category));
        }
        foreach (var prop in log.Properties)
        {
            state.Add(new KeyValuePair<string, object>(prop.Key, prop.Value ?? "null"));
        }
        if (log.Exception != null)
        {
            // BUG FIX: FullName and StackTrace can be null; coalesce so the
            // state list never carries null values (properties above never do).
            state.Add(new KeyValuePair<string, object>("exception.type", log.Exception.GetType().FullName ?? "unknown"));
            state.Add(new KeyValuePair<string, object>("exception.message", log.Exception.Message));
            state.Add(new KeyValuePair<string, object>("exception.stacktrace", log.Exception.StackTrace ?? string.Empty));
        }
        _logger.Log(
            MapLevel(log.Level),
            new EventId(0, log.Category),
            state,
            log.Exception,
            (s, e) => log.Message.ToString());
    }

    /// <summary>Maps LogStack levels to Microsoft.Extensions.Logging levels; unknown levels become Information.</summary>
    private static Microsoft.Extensions.Logging.LogLevel MapLevel(LogLevel level)
    {
        return level switch
        {
            LogLevel.Trace => Microsoft.Extensions.Logging.LogLevel.Trace,
            LogLevel.Debug => Microsoft.Extensions.Logging.LogLevel.Debug,
            LogLevel.Information => Microsoft.Extensions.Logging.LogLevel.Information,
            LogLevel.Warning => Microsoft.Extensions.Logging.LogLevel.Warning,
            LogLevel.Error => Microsoft.Extensions.Logging.LogLevel.Error,
            LogLevel.Critical => Microsoft.Extensions.Logging.LogLevel.Critical,
            _ => Microsoft.Extensions.Logging.LogLevel.Information
        };
    }

    public override async ValueTask DisposeAsync()
    {
        // IsEnabled doubles as the disposed flag: a second DisposeAsync (or a
        // dispose after external disable) is a no-op, including base disposal.
        if (!IsEnabled)
        {
            return;
        }
        IsEnabled = false;
        _loggerFactory?.Dispose();
        await base.DisposeAsync().ConfigureAwait(false);
    }

    /// <summary>
    /// No-op: the OTLP exporter batches internally and is flushed when the
    /// logger factory is disposed.
    /// </summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default)
    {
        return Task.CompletedTask;
    }
}
}

View File

@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Serilog.AspNetCore" Version="10.0.0" />
<PackageReference Include="Serilog.Sinks.Async" Version="2.1.0" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
<PackageReference Include="Serilog.Sinks.Network" Version="3.0.0" />
<PackageReference Include="Serilog.Sinks.Seq" Version="9.0.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,197 @@
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.DataProtection;
using Microsoft.Extensions.Hosting;
using Serilog;
using Serilog.Events;
using Serilog.Formatting.Json;
using System.Net.Sockets;
using System.Runtime.InteropServices;
using System.Text;
var builder = WebApplication.CreateBuilder(args);
//
// LOGGER CONFIGURATION (Equivalent to LoggerSettings)
//
Log.Logger = new LoggerConfiguration()
    .MinimumLevel.Verbose()
    .Enrich.WithProperty("Id", "TEST")
    .Enrich.WithProperty("AppName", "[JIJ BENT EEN BRASSER!]")
    .WriteTo.Async(a => a.Console())
    .WriteTo.Async(a => a.File(
        path: "logs/web-.log",
        rollingInterval: RollingInterval.Day,
        fileSizeLimitBytes: 1_000_000,
        rollOnFileSizeLimit: true,
        retainedFileCountLimit: 5,
        shared: true))
    .WriteTo.Async(a => a.File(
        new JsonFormatter(),
        path: "logs/test.json",
        rollingInterval: RollingInterval.Day))
    //.WriteTo.Seq("http://localhost:5341") // central logging
    .CreateLogger();

builder.Services.AddDataProtection()
    .PersistKeysToFileSystem(new DirectoryInfo(Path.Combine(Directory.GetCurrentDirectory(), "keys")))
    .SetApplicationName("SerilogStressTest");
builder.Services.AddRazorPages();
builder.WebHost.ConfigureKestrel(options =>
{
    options.ListenAnyIP(6000);
});

var app = builder.Build();
app.UseHttpsRedirection();
app.UseStaticFiles();
app.UseRouting();
app.UseAuthorization();
app.MapRazorPages();

//
// ==============================
// 🔥 TESTS START HERE
// ==============================
//
_ = Task.Run(RunLoggingTestsAsync);
_ = Task.Run(RunWebLoggingTestsAsync);
_ = Task.Run(RunLoggingExceptionTests);
_ = Task.Run(RunWebLoggingExceptionTests);
//_ = Task.Run(RunMemoryLeakTest);
_ = Task.Run(RunTcpLoggerTest);

try
{
    app.Run();
}
finally
{
    // BUG FIX: the async sinks buffer events; without CloseAndFlush at
    // shutdown the tail of the log is lost (per Serilog's lifecycle docs).
    Log.CloseAndFlush();
}
// =======================================================
// 1⃣ EXACT HIGH-SPEED FILE LOGGING LOOP
// =======================================================
// High-speed file-logging loop: emits one message at every level per
// iteration, 9 million iterations, with a 1ms pause between iterations.
async Task RunLoggingTestsAsync()
{
    const int iterations = 9_000_000;
    for (var index = 0; index < iterations; index++)
    {
        Log.Information("test to file {i} INFO", index);
        Log.Fatal("test to file {i} CRITICAL", index);
        Log.Debug("test to file {i} DEBUG", index);
        Log.Error("test to file {i} ERROR", index);
        Log.Verbose("test to file {i} TRACE", index);
        Log.Warning("test to file {i} WARNING", index);
        Console.WriteLine($"Logged: {index}");
        await Task.Delay(1);
    }
}
// =======================================================
// 2⃣ WEB LOGGER STRESS TEST
// =======================================================
// Endless web-logger stress loop: one message per level per iteration,
// incrementing counter, 1ms pause. Runs until the process exits.
async Task RunWebLoggingTestsAsync()
{
    var counter = 0;
    while (true)
    {
        counter++;
        Log.Information("web-test {i}", counter);
        Log.Debug("web-test {i}", counter);
        Log.Warning("web-test {i}", counter);
        Log.Error("web-test {i}", counter);
        Log.Verbose("web-test {i}", counter);
        await Task.Delay(1);
    }
}
// =======================================================
// 3⃣ EXCEPTION TEST (FILE LOGGER)
// =======================================================
// Throws and logs ten synthetic exceptions through the file logger.
void RunLoggingExceptionTests()
{
    for (var attempt = 0; attempt < 10; attempt++)
    {
        try
        {
            throw new Exception($"Normal Exception {attempt}");
        }
        catch (Exception ex)
        {
            Log.Error(ex, "Exception {Index}", attempt);
            Console.WriteLine($"Normal ExceptionLogged: {attempt}");
        }
    }
}
// =======================================================
// 4⃣ WEB EXCEPTION TEST
// =======================================================
// Throws ten synthetic exceptions and logs each one at every level.
void RunWebLoggingExceptionTests()
{
    for (var attempt = 0; attempt < 10; attempt++)
    {
        try
        {
            throw new Exception($"WebException {attempt}");
        }
        catch (Exception ex)
        {
            Log.Fatal(ex, "CRITICAL");
            Log.Debug(ex, "DEBUG");
            Log.Error(ex, "ERROR");
            Log.Verbose(ex, "TRACE");
            Log.Warning(ex, "WARNING");
            Log.Information(ex, "INFORMATION");
            Console.WriteLine($"WebExceptionLogged: {attempt}");
        }
    }
}
// =======================================================
// 6⃣ MEMORY LEAK TEST (IDENTICAL BEHAVIOR)
// =======================================================
// Deliberately leaks memory forever to exercise monitoring tooling:
// 5MB managed (kept alive by the list) plus 10MB unmanaged (never freed)
// every 500ms. Only enable on purpose.
async Task RunMemoryLeakTest()
{
    var retainedBuffers = new List<byte[]>();
    while (true)
    {
        retainedBuffers.Add(new byte[5_000_000]); // 5MB
        Marshal.AllocHGlobal(10_000_000); // 10MB unmanaged
        await Task.Delay(500);
    }
}
// =======================================================
// 7⃣ TCP LOGGER TEST
// =======================================================
// Sends a numbered line over raw TCP once per second; logs a warning and
// gives up if the endpoint cannot be reached.
async Task RunTcpLoggerTest()
{
    using var client = new TcpClient();
    try
    {
        await client.ConnectAsync("192.168.1.1", 12345);
        var stream = client.GetStream();
        var sequence = 0;
        while (true)
        {
            var payload = Encoding.UTF8.GetBytes($"TCP log {++sequence}\n");
            await stream.WriteAsync(payload);
            await Task.Delay(1000);
        }
    }
    catch
    {
        Log.Warning("TCP server not reachable");
    }
}

View File

@@ -0,0 +1,12 @@
{
"profiles": {
"EonaCat.LogStack.SerilogTest": {
"commandName": "Project",
"launchBrowser": true,
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"applicationUrl": "https://localhost:56815;http://localhost:56816"
}
}
}

View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="utf-8"?>
<key id="ce36e671-3994-4686-93db-52c792888079" version="1">
<creationDate>2026-02-13T12:58:52.6395786Z</creationDate>
<activationDate>2026-02-13T12:58:52.6395786Z</activationDate>
<expirationDate>2026-05-14T12:58:52.6395786Z</expirationDate>
<descriptor deserializerType="Microsoft.AspNetCore.DataProtection.AuthenticatedEncryption.ConfigurationModel.AuthenticatedEncryptorDescriptorDeserializer, Microsoft.AspNetCore.DataProtection, Version=10.0.0.0, Culture=neutral, PublicKeyToken=adb9793829ddae60">
<descriptor>
<encryption algorithm="AES_256_CBC" />
<validation algorithm="HMACSHA256" />
<masterKey p4:requiresEncryption="true" xmlns:p4="http://schemas.asp.net/2015/03/dataProtection">
<!-- Warning: the key below is in an unencrypted form. -->
<value>/V8LCH65h4jnYN0CNj+b+f/KcWTcYS7HEFlmIS8h/ryyTH5YEXlxLIWHxoZYbu6+vY7JXF3O+iDkdNnuW8BtFg==</value>
</masterKey>
</descriptor>
</descriptor>
</key>

View File

@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
<Nullable>enable</Nullable>
<GeneratePackageOnBuild>True</GeneratePackageOnBuild>
<Title>EonaCat.LogStack.Server</Title>
<Company>EonaCat (Jeroen Saey)</Company>
<Description>EonaCat.LogStack.Server is a server for the logging library</Description>
<Copyright>EonaCat (Jeroen Saey)</Copyright>
<PackageProjectUrl>https://www.nuget.org/packages/EonaCat.LogStack.Server/</PackageProjectUrl>
<PackageIcon>icon.png</PackageIcon>
<PackageReadmeFile>README.md</PackageReadmeFile>
<PackageTags>EonaCat;Logger;EonaCatLogStack;server;Log;Writer;Jeroen;Saey</PackageTags>
</PropertyGroup>
<ItemGroup>
<None Include="..\EonaCat.LogStack\icon.png">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
<None Include="..\README.md">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
</ItemGroup>
</Project>

View File

@@ -0,0 +1,287 @@
using System.IO;
using System.Net.Sockets;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using System.Threading;
using System;
using System.Linq;
namespace EonaCat.LogStack.Server
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public class Server
{
private TcpListener _tcpListener;
private UdpClient _udpListener;
private CancellationTokenSource _cts;
private bool _isRunning;
private readonly bool _useUdp;
private const long MaxLogFileSize = 200 * 1024 * 1024; // 200MB log rollover limit
private readonly int _logRetentionDays; // Number of days to retain logs
private readonly long _maxLogDirectorySize; // Maximum allowed size of the logs directory
private const int UdpBufferSize = 65507; // Maximum UDP packet size (65507 bytes for UDP payload)
/// <summary>
/// EonaCat Log Server
/// </summary>
/// <param name="useUdp">Determine if we need to start a udp server (default: true)</param>
/// <param name="logRetentionDays">Max log retention days (default: 30)</param>
/// <param name="maxLogDirectorySize">Max log directory size (default: 10GB)</param>
public Server(bool useUdp = true, int logRetentionDays = 30, long maxLogDirectorySize = 10L * 1024 * 1024 * 1024) // Default 10GB max directory size
{
_useUdp = useUdp;
_logRetentionDays = logRetentionDays;
_maxLogDirectorySize = maxLogDirectorySize;
}
protected virtual Task ProcessLogAsync(string logData)
{
string logsRootDirectory = "logs";
// Create root log directory if it doesn't exist
if (!Directory.Exists(logsRootDirectory))
{
Directory.CreateDirectory(logsRootDirectory);
}
// Create a daily directory for logs
string dailyLogsDirectory = Path.Combine(logsRootDirectory, DateTime.Now.ToString("yyyyMMdd"));
if (!Directory.Exists(dailyLogsDirectory))
{
Directory.CreateDirectory(dailyLogsDirectory);
}
// Base log file name
string baseLogFilePath = Path.Combine(dailyLogsDirectory, "EonaCatLogs");
string logFilePath = baseLogFilePath + ".log";
int fileIndex = 1;
while (File.Exists(logFilePath) && new FileInfo(logFilePath).Length > MaxLogFileSize)
{
logFilePath = baseLogFilePath + $"_{fileIndex}.log";
fileIndex++;
}
// After processing log, check directory size and clean up if needed
CleanUpOldLogs();
return File.AppendAllTextAsync(logFilePath, logData + Environment.NewLine);
}
private void CleanUpOldLogs()
{
string logsRootDirectory = "logs";
if (!Directory.Exists(logsRootDirectory))
{
return;
}
// Delete old directories
foreach (var directory in Directory.GetDirectories(logsRootDirectory))
{
try
{
DirectoryInfo dirInfo = new DirectoryInfo(directory);
if (dirInfo.CreationTime < DateTime.Now.AddDays(-_logRetentionDays))
{
Console.WriteLine($"Deleting old log directory: {directory}");
Directory.Delete(directory, true); // Delete directory and its contents
}
}
catch (Exception ex)
{
Console.WriteLine($"Error deleting old directory {directory}: {ex.Message}");
}
}
// Ensure total size of log directory doesn't exceed max limit
long totalDirectorySize = GetDirectorySize(logsRootDirectory);
if (totalDirectorySize > _maxLogDirectorySize)
{
Console.WriteLine("Log directory size exceeded limit, cleaning up...");
// Delete the oldest directories until the size limit is met
foreach (var directory in Directory.GetDirectories(logsRootDirectory).OrderBy(d => new DirectoryInfo(d).CreationTime))
{
try
{
DirectoryInfo dirInfo = new DirectoryInfo(directory);
long dirSize = GetDirectorySize(directory);
totalDirectorySize -= dirSize;
// Delete the directory if the total size exceeds the limit
Directory.Delete(directory, true);
Console.WriteLine($"Deleted directory: {directory}");
// Stop deleting if we are under the size limit
if (totalDirectorySize <= _maxLogDirectorySize)
{
break;
}
}
catch (Exception ex)
{
Console.WriteLine($"Error deleting directory {directory}: {ex.Message}");
}
}
}
}
private long GetDirectorySize(string directory)
{
long size = 0;
try
{
// Add size of files in the directory
size += Directory.GetFiles(directory).Sum(file => new FileInfo(file).Length);
// Add size of files in subdirectories
foreach (var subdirectory in Directory.GetDirectories(directory))
{
size += GetDirectorySize(subdirectory);
}
}
catch (Exception ex)
{
Console.WriteLine($"Error calculating size for directory {directory}: {ex.Message}");
}
return size;
}
public async Task Start(IPAddress ipAddress = null, int port = 5555)
{
if (ipAddress == null)
{
ipAddress = IPAddress.Any;
}
_cts = new CancellationTokenSource();
_isRunning = true;
if (_useUdp)
{
_udpListener = new UdpClient(port);
Console.WriteLine($"EonaCat UDP Log Server started on port {port}...");
await ListenUdpAsync();
}
else
{
_tcpListener = new TcpListener(ipAddress, port);
_tcpListener.Start();
Console.WriteLine($"EonaCat TCP Log Server started on port {port}...");
await ListenTcpAsync();
}
}
private async Task ListenTcpAsync()
{
try
{
while (!_cts.Token.IsCancellationRequested)
{
TcpClient client = await _tcpListener.AcceptTcpClientAsync();
_ = Task.Run(() => HandleTcpClient(client));
}
}
catch (OperationCanceledException)
{
Console.WriteLine("TCP Server stopping...");
}
}
private async Task ListenUdpAsync()
{
try
{
while (!_cts.Token.IsCancellationRequested)
{
// Increased buffer size for UDP
UdpReceiveResult result = await _udpListener.ReceiveAsync();
string logData = Encoding.UTF8.GetString(result.Buffer);
// If the received data is too large, process it in chunks
if (result.Buffer.Length > UdpBufferSize)
{
// Handle fragmentation and reassembly (this is a basic placeholder logic)
Console.WriteLine("Received large UDP data. Handling fragmentation.");
await ProcessLargeDataAsync(result.Buffer);
}
else
{
Console.WriteLine($"Received UDP Log: {logData}");
await ProcessLogAsync(logData);
}
}
}
catch (OperationCanceledException)
{
Console.WriteLine("UDP Server stopping...");
}
}
private async Task ProcessLargeDataAsync(byte[] data)
{
// You can implement your own logic here for processing large UDP data, such as fragmentation handling
string largeDataString = Encoding.UTF8.GetString(data);
await ProcessLogAsync(largeDataString);
}
public void Stop()
{
if (_isRunning)
{
_cts.Cancel();
// Proper cleanup of resources
_cts.Dispose();
if (_useUdp)
{
_udpListener?.Close();
_udpListener?.Dispose();
}
else
{
_tcpListener?.Stop();
_tcpListener?.Server?.Dispose(); // Dispose of the socket (if any)
}
_isRunning = false;
Console.WriteLine("EonaCat Log Server stopped.");
}
}
private async Task HandleTcpClient(TcpClient client)
{
try
{
using (NetworkStream stream = client.GetStream())
using (StreamReader reader = new StreamReader(stream, Encoding.UTF8))
{
char[] buffer = new char[8192]; // 8KB buffer size for large data
int bytesRead;
StringBuilder logData = new StringBuilder();
while ((bytesRead = await reader.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
logData.Append(new string(buffer, 0, bytesRead));
}
Console.WriteLine($"Received TCP Log: {logData.ToString()}");
await ProcessLogAsync(logData.ToString());
}
}
catch (Exception ex)
{
Console.WriteLine($"Error: {ex.Message}");
}
finally
{
// Ensure client is properly disposed
client.Close();
client.Dispose();
}
}
}
}

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.1</TargetFramework>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Diagnostics.EventLog" Version="10.0.3" />
<PackageReference Include="System.Threading.AccessControl" Version="10.0.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\EonaCat.LogStack\EonaCat.LogStack.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,32 @@
using EonaCat.LogStack.Configuration;
using EonaCat.LogStack.Flows;
using System;
using System.Collections.Generic;
using System.Text;
namespace EonaCat.LogStack.Flows.WindowsEventLog
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public static class EonaCatLogStackExtensions
{
/// <summary>
/// Write to Windows Event log
/// </summary>
public static LogBuilder WriteToWindowsEventLog(this LogBuilder logBuilder,
string sourceName = "EonaCatLogStack",
string logName = "Application",
int maxMessageLength = 30000,
Core.LogLevel minimumLevel = Core.LogLevel.Warning)
{
logBuilder.AddFlow(new WindowsEventLogFlow(
sourceName,
logName,
maxMessageLength,
minimumLevel));
WindowsEventLogFlow.EnsureSourceExists();
return logBuilder;
}
}
}

View File

@@ -0,0 +1,258 @@
using EonaCat.LogStack.Core;
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Writes log events to the Windows Event Log.
///
/// Requires the source to be registered before first use.
/// Call <see cref="EnsureSourceExists"/> once during application setup
/// (requires elevated privileges the first time).
///
/// .NET 4.8.1 compatible. Silently no-ops on non-Windows platforms.
/// </summary>
public sealed class WindowsEventLogFlow : FlowBase
{
private readonly string _sourceName;
private readonly string _logName;
private readonly int _maxMessageLength;
private System.Diagnostics.EventLog _eventLog;
private readonly object _initLock = new object();
private volatile bool _initialized;
public WindowsEventLogFlow(
string sourceName = "EonaCatLogStack",
string logName = "Application",
int maxMessageLength = 30000,
LogLevel minimumLevel = LogLevel.Warning)
: base("WindowsEventLog:" + sourceName, minimumLevel)
{
if (sourceName == null)
{
throw new ArgumentNullException("sourceName");
}
if (logName == null)
{
throw new ArgumentNullException("logName");
}
_sourceName = sourceName;
_logName = logName;
_maxMessageLength = maxMessageLength;
}
/// <summary>
/// Registers the event source with the OS. Must be called with admin rights
/// the first time on each machine. Safe to call repeatedly.
/// </summary>
public static void EnsureSourceExists(string sourceName = "EonaCatLogStack",
string logName = "Application")
{
if (!IsWindows())
{
return;
}
try
{
if (!System.Diagnostics.EventLog.SourceExists(sourceName))
{
System.Diagnostics.EventLog.CreateEventSource(sourceName, logName);
}
}
catch (Exception ex)
{
Console.Error.WriteLine("[WindowsEventLogFlow] Cannot create source: " + ex.Message);
}
}
public override Task<WriteResult> BlastAsync(
LogEvent logEvent,
CancellationToken cancellationToken = default(CancellationToken))
{
if (!IsEnabled || !IsLogLevelEnabled(logEvent))
{
return Task.FromResult(WriteResult.LevelFiltered);
}
if (!IsWindows())
{
return Task.FromResult(WriteResult.Success);
}
EnsureInitialized();
if (_eventLog == null)
{
return Task.FromResult(WriteResult.Dropped);
}
try
{
string msg = BuildMessage(logEvent);
if (msg.Length > _maxMessageLength)
{
msg = msg.Substring(0, _maxMessageLength) + "... [truncated]";
}
_eventLog.WriteEntry(msg, ToEventType(logEvent.Level), ToEventId(logEvent.Level));
Interlocked.Increment(ref BlastedCount);
return Task.FromResult(WriteResult.Success);
}
catch (Exception ex)
{
Console.Error.WriteLine("[WindowsEventLogFlow] Write error: " + ex.Message);
Interlocked.Increment(ref DroppedCount);
return Task.FromResult(WriteResult.Dropped);
}
}
public override Task<WriteResult> BlastBatchAsync(
ReadOnlyMemory<LogEvent> logEvents,
CancellationToken cancellationToken = default(CancellationToken))
{
if (!IsEnabled)
{
return Task.FromResult(WriteResult.FlowDisabled);
}
foreach (LogEvent e in logEvents.ToArray())
{
if (IsLogLevelEnabled(e))
{
BlastAsync(e, cancellationToken);
}
}
return Task.FromResult(WriteResult.Success);
}
public override Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
=> Task.FromResult(0);
public override async ValueTask DisposeAsync()
{
IsEnabled = false;
if (_eventLog != null) { try { _eventLog.Dispose(); } catch { } }
await base.DisposeAsync().ConfigureAwait(false);
}
// ----------------------------------------------------------------- helpers
private void EnsureInitialized()
{
if (_initialized)
{
return;
}
lock (_initLock)
{
if (_initialized)
{
return;
}
try
{
if (System.Diagnostics.EventLog.SourceExists(_sourceName))
{
_eventLog = new System.Diagnostics.EventLog(_logName) { Source = _sourceName };
}
else
{
Console.Error.WriteLine(
"[WindowsEventLogFlow] Source '" + _sourceName +
"' not registered. Call EnsureSourceExists() with admin rights.");
}
}
catch (Exception ex)
{
Console.Error.WriteLine("[WindowsEventLogFlow] Init error: " + ex.Message);
}
_initialized = true;
}
}
private static string BuildMessage(LogEvent log)
{
var sb = new System.Text.StringBuilder(512);
sb.Append("Level: ").AppendLine(LevelString(log.Level));
sb.Append("Category: ").AppendLine(log.Category ?? string.Empty);
sb.Append("Time: ").AppendLine(LogEvent.GetDateTime(log.Timestamp).ToString("O"));
sb.Append("Message: ").AppendLine(log.Message.Length > 0 ? log.Message.ToString() : string.Empty);
if (log.Exception != null)
{
sb.Append("Exception: ").AppendLine(log.Exception.ToString());
}
if (log.Properties.Count > 0)
{
sb.AppendLine("Properties:");
foreach (var kv in log.Properties.ToArray())
{
sb.Append(" ").Append(kv.Key).Append(" = ")
.AppendLine(kv.Value != null ? kv.Value.ToString() : "null");
}
}
return sb.ToString();
}
private static System.Diagnostics.EventLogEntryType ToEventType(LogLevel level)
{
switch (level)
{
case LogLevel.Warning: return System.Diagnostics.EventLogEntryType.Warning;
case LogLevel.Error:
case LogLevel.Critical: return System.Diagnostics.EventLogEntryType.Error;
default: return System.Diagnostics.EventLogEntryType.Information;
}
}
private static int ToEventId(LogLevel level)
{
// Stable event IDs per level for easy filtering in Event Viewer
switch (level)
{
case LogLevel.Trace: return 1000;
case LogLevel.Debug: return 1001;
case LogLevel.Information: return 1002;
case LogLevel.Warning: return 1003;
case LogLevel.Error: return 1004;
case LogLevel.Critical: return 1005;
default: return 1999;
}
}
private static string LevelString(LogLevel level)
{
switch (level)
{
case LogLevel.Trace: return "TRACE";
case LogLevel.Debug: return "DEBUG";
case LogLevel.Information: return "INFO";
case LogLevel.Warning: return "WARN";
case LogLevel.Error: return "ERROR";
case LogLevel.Critical: return "CRITICAL";
default: return level.ToString().ToUpperInvariant();
}
}
private static bool IsWindows()
{
#if NET48 || NET45 || NET451 || NET452 || NET46 || NET461 || NET462 || NET47 || NET471 || NET472 || NET481
return true;
#else
return System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Windows);
#endif
}
}
}

126
EonaCat.LogStack.sln Normal file
View File

@@ -0,0 +1,126 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 18
VisualStudioVersion = 18.1.11312.151
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "EonaCat.LogStack", "EonaCat.LogStack\EonaCat.LogStack.csproj", "{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EonaCat.LogStack.LogClient", "EonaCat.LogStack.LogClient\EonaCat.LogStack.LogClient.csproj", "{D1025803-9588-46EB-8771-88E25209B780}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConsoleApp1", "ConsoleApp1\ConsoleApp1.csproj", "{C9F66B51-6661-467A-9E22-E0E578EB76A1}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EonaCat.LogStack.Flows.WindowsEventLog", "EonaCat.LogStack.WindowsEventLogFlow\EonaCat.LogStack.Flows.WindowsEventLog.csproj", "{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EonaCat.LogStack.OpenTelemetryFlow", "EonaCat.LogStack.OpenTelemetryFlow\EonaCat.LogStack.OpenTelemetryFlow.csproj", "{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EonaCat.LogStack.SerilogTest", "EonaCat.LogStack.SerilogTest\EonaCat.LogStack.SerilogTest.csproj", "{F360998D-46E0-5A88-BA3E-47A4162C8EB4}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EonaCat.LogStack.Test.Web", "Testers\EonaCat.LogStack.Test.Web\EonaCat.LogStack.Test.Web.csproj", "{9240A706-1852-C232-FB58-E54A5A528135}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{85A2505C-8976-4046-963B-D7B63EF81E47}"
ProjectSection(SolutionItems) = preProject
README.md = README.md
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Debug|x64 = Debug|x64
Debug|x86 = Debug|x86
Release|Any CPU = Release|Any CPU
Release|x64 = Release|x64
Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Debug|Any CPU.Build.0 = Debug|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Debug|x64.ActiveCfg = Debug|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Debug|x64.Build.0 = Debug|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Debug|x86.ActiveCfg = Debug|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Debug|x86.Build.0 = Debug|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Release|Any CPU.ActiveCfg = Release|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Release|Any CPU.Build.0 = Release|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Release|x64.ActiveCfg = Release|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Release|x64.Build.0 = Release|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Release|x86.ActiveCfg = Release|Any CPU
{DCD1D32E-0F24-4D0F-A6B6-59941C0F9BB7}.Release|x86.Build.0 = Release|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Debug|x64.ActiveCfg = Debug|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Debug|x64.Build.0 = Debug|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Debug|x86.ActiveCfg = Debug|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Debug|x86.Build.0 = Debug|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Release|Any CPU.Build.0 = Release|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Release|x64.ActiveCfg = Release|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Release|x64.Build.0 = Release|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Release|x86.ActiveCfg = Release|Any CPU
{D1025803-9588-46EB-8771-88E25209B780}.Release|x86.Build.0 = Release|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Debug|x64.ActiveCfg = Debug|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Debug|x64.Build.0 = Debug|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Debug|x86.ActiveCfg = Debug|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Debug|x86.Build.0 = Debug|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Release|Any CPU.Build.0 = Release|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Release|x64.ActiveCfg = Release|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Release|x64.Build.0 = Release|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Release|x86.ActiveCfg = Release|Any CPU
{C9F66B51-6661-467A-9E22-E0E578EB76A1}.Release|x86.Build.0 = Release|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Debug|x64.ActiveCfg = Debug|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Debug|x64.Build.0 = Debug|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Debug|x86.ActiveCfg = Debug|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Debug|x86.Build.0 = Debug|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Release|Any CPU.Build.0 = Release|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Release|x64.ActiveCfg = Release|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Release|x64.Build.0 = Release|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Release|x86.ActiveCfg = Release|Any CPU
{F5EFDDEA-C4A4-4AE7-B853-DF91062D4558}.Release|x86.Build.0 = Release|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Debug|x64.ActiveCfg = Debug|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Debug|x64.Build.0 = Debug|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Debug|x86.ActiveCfg = Debug|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Debug|x86.Build.0 = Debug|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Release|Any CPU.Build.0 = Release|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Release|x64.ActiveCfg = Release|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Release|x64.Build.0 = Release|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Release|x86.ActiveCfg = Release|Any CPU
{CBF0AF0C-CF27-7D45-BCC2-DA7B7A40539C}.Release|x86.Build.0 = Release|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Debug|x64.ActiveCfg = Debug|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Debug|x64.Build.0 = Debug|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Debug|x86.ActiveCfg = Debug|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Debug|x86.Build.0 = Debug|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Release|Any CPU.Build.0 = Release|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Release|x64.ActiveCfg = Release|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Release|x64.Build.0 = Release|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Release|x86.ActiveCfg = Release|Any CPU
{F360998D-46E0-5A88-BA3E-47A4162C8EB4}.Release|x86.Build.0 = Release|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Debug|x64.ActiveCfg = Debug|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Debug|x64.Build.0 = Debug|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Debug|x86.ActiveCfg = Debug|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Debug|x86.Build.0 = Debug|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Release|Any CPU.Build.0 = Release|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Release|x64.ActiveCfg = Release|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Release|x64.Build.0 = Release|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Release|x86.ActiveCfg = Release|Any CPU
{9240A706-1852-C232-FB58-E54A5A528135}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {B01183F3-D85E-45FB-9749-DA281F465A0F}
EndGlobalSection
EndGlobal

View File

@@ -0,0 +1,11 @@
namespace EonaCat.LogStack;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Shared constants for the EonaCat.LogStack library.
/// </summary>
public static class Constants
{
    /// <summary>
    /// Date/time format strings used when rendering log timestamps.
    /// </summary>
    public static class DateTimeFormats
    {
        // Mutable on purpose so hosts can override the timestamp format globally.
        public static string LOGGING { get; set; } = "yyyy-MM-dd HH:mm:ss.fff";
    }
}

View File

@@ -0,0 +1,27 @@
using EonaCat.Versioning.Helpers;
using System.Reflection;
namespace EonaCat.LogStack;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Build and version metadata for the EonaCat.LogStack assembly.
/// </summary>
public static class DllInfo
{
    public const string NAME = "EonaCatLogStack";
    public const string VERSION = "0.0.1";

    static DllInfo()
    {
        // Resolved at compile time; no runtime flag needed.
#if DEBUG
        VersionName = "DEBUG";
#else
        VersionName = "RELEASE";
#endif
    }

    /// <summary>"DEBUG" or "RELEASE", depending on the build configuration.</summary>
    internal static string VersionName { get; }

    /// <summary>Display name of the hosting application; overridable by the host.</summary>
    public static string ApplicationName { get; set; } = "EonaCatLogStack";

    /// <summary>Version string resolved through the EonaCat.Versioning helper.</summary>
    public static string EonaCatVersion => VersionHelper.GetEonaCatVersion(Assembly.GetExecutingAssembly());
}

199
EonaCat.LogStack/Enums.cs Normal file
View File

@@ -0,0 +1,199 @@
using Microsoft.Extensions.Logging;
namespace EonaCat.LogStack;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Conversions between <c>Core.LogLevel</c>, Microsoft's <see cref="LogLevel"/>,
/// <see cref="ESeverity"/>, and external (GELF/syslog) level numbers.
/// </summary>
public static class LogTypeConverter
{
    /// <summary>Maps a Microsoft logging level to the LogStack level.</summary>
    public static Core.LogLevel FromLogLevel(this LogLevel logLevel)
    {
        switch (logLevel)
        {
            case LogLevel.None:
                return Core.LogLevel.None;
            case LogLevel.Error:
                return Core.LogLevel.Error;
            case LogLevel.Debug:
                return Core.LogLevel.Debug;
            case LogLevel.Critical:
                return Core.LogLevel.Critical;
            case LogLevel.Warning:
                return Core.LogLevel.Warning;
            case LogLevel.Trace:
                return Core.LogLevel.Trace;
            case LogLevel.Information:
                return Core.LogLevel.Information;
            default:
                return Core.LogLevel.Trace;
        }
    }

    /// <summary>Maps a LogStack level to the Microsoft logging level.</summary>
    public static LogLevel ToLogLevel(this Core.LogLevel logLevel)
    {
        switch (logLevel)
        {
            case Core.LogLevel.None:
                return LogLevel.None;
            case Core.LogLevel.Error:
                return LogLevel.Error;
            case Core.LogLevel.Debug:
                return LogLevel.Debug;
            case Core.LogLevel.Critical:
                return LogLevel.Critical;
            case Core.LogLevel.Warning:
                return LogLevel.Warning;
            case Core.LogLevel.Trace:
                return LogLevel.Trace;
            case Core.LogLevel.Information:
                return LogLevel.Information;
            default:
                return LogLevel.Information;
        }
    }

    /// <summary>
    /// Upper-case display name for a level ("TRACE", "INFO", ...).
    /// NOTE(review): an extension method can never shadow the instance
    /// <see cref="System.Enum.ToString()"/>, so <c>level.ToString()</c> does NOT
    /// call this — it must be invoked explicitly as
    /// <c>LogTypeConverter.ToString(level)</c>.
    /// </summary>
    public static string ToString(this Core.LogLevel logLevel)
    {
        switch (logLevel)
        {
            case Core.LogLevel.None:
                return "NONE";
            case Core.LogLevel.Error:
                return "ERROR";
            case Core.LogLevel.Debug:
                return "DEBUG";
            case Core.LogLevel.Critical:
                return "CRITICAL";
            case Core.LogLevel.Warning:
                return "WARNING";
            case Core.LogLevel.Trace:
                return "TRACE";
            case Core.LogLevel.Information:
                return "INFO";
            default:
                return "INFO";
        }
    }

    /// <summary>Maps a syslog-style severity to the LogStack level.</summary>
    public static Core.LogLevel FromSeverity(this ESeverity logLevel)
    {
        switch (logLevel)
        {
            case ESeverity.Debug:
                return Core.LogLevel.Debug;
            case ESeverity.Warn:
                return Core.LogLevel.Warning;
            case ESeverity.Emergency:
                return Core.LogLevel.Trace;
            case ESeverity.Critical:
                return Core.LogLevel.Critical;
            case ESeverity.Error:
                return Core.LogLevel.Error;
            default:
                return Core.LogLevel.Information;
        }
    }

    /// <summary>
    /// Maps a LogStack level to a GELF (syslog) numeric level.
    /// Fixed: the old implementation switched on <c>logLevel.ToString()</c>,
    /// which invokes <see cref="System.Enum.ToString()"/> ("Trace", "Information", ...)
    /// rather than the extension above ("TRACE", "INFO", ...), so no case ever
    /// matched and every level came back as 6 (INFO). Switching on the enum
    /// value directly restores the intended mapping.
    /// </summary>
    public static int ToGrayLogLevel(this Core.LogLevel logLevel)
    {
        switch (logLevel)
        {
            case Core.LogLevel.Trace: return 7;
            case Core.LogLevel.Debug: return 7;
            case Core.LogLevel.Information: return 6;
            case Core.LogLevel.Warning: return 4;
            case Core.LogLevel.Error: return 3;
            case Core.LogLevel.Critical: return 2;
            default: return 6; // Default to INFO
        }
    }

    /// <summary>Maps a LogStack level to a syslog-style severity.</summary>
    public static ESeverity ToSeverity(this Core.LogLevel logLevel)
    {
        switch (logLevel)
        {
            case Core.LogLevel.Debug:
                return ESeverity.Debug;
            case Core.LogLevel.Warning:
                return ESeverity.Warn;
            case Core.LogLevel.Critical:
                return ESeverity.Critical;
            case Core.LogLevel.Trace:
                return ESeverity.Emergency;
            case Core.LogLevel.Error:
                return ESeverity.Error;
            default:
                return ESeverity.Info;
        }
    }
}
/// <summary>
/// Syslog facility codes (RFC 3164/5424 numbering is mirrored in the
/// explicit values; do not renumber).
/// </summary>
public enum SyslogFacility
{
    Kernel = 0, // 0 - Kernel messages
    UserLevel = 1, // 1 - User-level messages
    MailSystem = 2, // 2 - Mail system
    Daemon = 3, // 3 - Daemon messages
    Auth = 4, // 4 - Security/authorization messages
    Syslog = 5, // 5 - Messages generated by syslogd
    Lpr = 6, // 6 - Line printer subsystem
    News = 7, // 7 - Network news subsystem
    UUCP = 8, // 8 - UUCP subsystem
    Clock = 9, // 9 - Clock daemon
    AuthPriv = 10, // 10 - Security/authorization messages (privileged)
    Ftp = 11, // 11 - FTP daemon
    Ntp = 12, // 12 - NTP subsystem
    Audit = 13, // 13 - Audit messages
    Alert = 14, // 14 - Log alert messages
    Cron = 15, // 15 - Cron daemon
    Local0 = 16, // 16 - Local use 0 (custom usage)
    Local1 = 17, // 17 - Local use 1 (custom usage)
    Local2 = 18, // 18 - Local use 2 (custom usage)
    Local3 = 19, // 19 - Local use 3 (custom usage)
    Local4 = 20, // 20 - Local use 4 (custom usage)
    Local5 = 21, // 21 - Local use 5 (custom usage)
    Local6 = 22, // 22 - Local use 6 (custom usage)
    Local7 = 23 // 23 - Local use 7 (custom usage)
}
/// <summary>
/// Message severity.
/// </summary>
/// <remarks>
/// NOTE(review): the numeric ordering (Debug=0 ... Emergency=6) is the reverse of
/// RFC 5424 syslog severities; conversions go through
/// <c>LogTypeConverter.ToSeverity</c>/<c>FromSeverity</c>, so do not renumber.
/// </remarks>
public enum ESeverity
{
    /// <summary>
    /// Debug messages.
    /// </summary>
    Debug = 0,
    /// <summary>
    /// Informational messages.
    /// </summary>
    Info = 1,
    /// <summary>
    /// Warning messages.
    /// </summary>
    Warn = 2,
    /// <summary>
    /// Error messages.
    /// </summary>
    Error = 3,
    /// <summary>
    /// Alert messages.
    /// </summary>
    Alert = 4,
    /// <summary>
    /// Critical messages.
    /// </summary>
    Critical = 5,
    /// <summary>
    /// Emergency messages.
    /// </summary>
    Emergency = 6
}

View File

@@ -0,0 +1,85 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
    <TargetFrameworks>netstandard2.1;net48;net8.0</TargetFrameworks>
<ApplicationIcon>icon.ico</ApplicationIcon>
<LangVersion>latest</LangVersion>
<Authors>EonaCat (Jeroen Saey)</Authors>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
<Company>EonaCat (Jeroen Saey)</Company>
<PackageIcon>icon.png</PackageIcon>
<PackageProjectUrl>https://www.nuget.org/packages/EonaCat.LogStack/</PackageProjectUrl>
<Description>EonaCat.LogStack is a logging library</Description>
<PackageReleaseNotes>Public release version</PackageReleaseNotes>
<Copyright>EonaCat (Jeroen Saey)</Copyright>
<PackageTags>EonaCat;Logger;EonaCatLogStack;Log;Writer;Jeroen;Saey</PackageTags>
<PackageIconUrl />
<Version>1.7.9</Version>
<FileVersion>1.7.9</FileVersion>
<PackageReadmeFile>README.md</PackageReadmeFile>
<GenerateDocumentationFile>True</GenerateDocumentationFile>
<PackageLicenseFile>LICENSE</PackageLicenseFile>
<PackageRequireLicenseAcceptance>True</PackageRequireLicenseAcceptance>
<Title>EonaCat.LogStack</Title>
<RepositoryType>git</RepositoryType>
</PropertyGroup>
<PropertyGroup>
<EVRevisionFormat>1.7.6+{chash:10}.{c:ymd}</EVRevisionFormat>
<EVDefault>true</EVDefault>
<EVInfo>true</EVInfo>
<EVTagMatch>v[0-9]*</EVTagMatch>
<EVRemoveTagV>true</EVRemoveTagV>
<EVVcs>git</EVVcs>
<EVCheckAllAttributes>true</EVCheckAllAttributes>
<EVShowRevision>true</EVShowRevision>
</PropertyGroup>
<PropertyGroup>
<Version>$(GeneratedVersion)</Version>
</PropertyGroup>
<Target Name="EVPack" BeforeTargets="Pack">
<Message Text="EVPack: Forcing NuGet Version = $(GeneratedVersion)" Importance="High" />
<PropertyGroup>
<Version>$(GeneratedVersion)</Version>
</PropertyGroup>
</Target>
<ItemGroup>
<None Remove="icon.png" />
<None Include="..\LICENSE">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
<None Include="icon.png">
<Pack>True</Pack>
<PackagePath>
</PackagePath>
</None>
</ItemGroup>
<ItemGroup>
<PackageReference Include="EonaCat.Json" Version="2.2.0" />
<PackageReference Include="EonaCat.Versioning" Version="1.2.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="EonaCat.Versioning.Helpers" Version="1.0.2" />
<PackageReference Include="Microsoft.CSharp" Version="4.7.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="10.0.3" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="10.0.3" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="10.0.3" />
<PackageReference Include="System.Net.Http" Version="4.3.4" />
<PackageReference Include="System.Threading.Channels" Version="10.0.3" />
</ItemGroup>
<ItemGroup>
<None Update="LICENSE.md">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
<None Update="README.md">
<Pack>True</Pack>
<PackagePath>\</PackagePath>
</None>
</ItemGroup>
</Project>

View File

@@ -0,0 +1,291 @@
using EonaCat.LogStack.Boosters;
using EonaCat.LogStack.Core;
using EonaCat.LogStack.Flows;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
namespace EonaCat.LogStack
{
/// <summary>
/// EonaCat logger with flow-based architecture, booster, and pre-build modifier hook.
/// Designed for zero-allocation logging paths and superior memory efficiency.
/// </summary>
public sealed class EonaCatLogStack : IAsyncDisposable
{
private readonly string _category;
private readonly List<IFlow> _flows = new List<IFlow>();
private readonly List<IBooster> _boosters = new List<IBooster>();
private readonly ConcurrentBag<IFlow> _concurrentFlows = new ConcurrentBag<IFlow>();
private readonly LogLevel _minimumLevel;
private readonly TimestampMode _timestampMode;
private volatile bool _isDisposed;
private long _totalLoggedCount;
private long _totalDroppedCount;
private readonly List<ActionRef<LogEventBuilder>> _modifiers = new List<ActionRef<LogEventBuilder>>();
public delegate void ActionRef<T>(ref T item);
private readonly object _modifiersLock = new object();
public event EventHandler<LogMessage> OnLog;
/// <summary>
/// Creates a new logger instance
/// </summary>
public EonaCatLogStack(string category = "Application",
LogLevel minimumLevel = LogLevel.Trace,
TimestampMode timestampMode = TimestampMode.Utc)
{
_category = category ?? throw new ArgumentNullException(nameof(category));
_minimumLevel = minimumLevel;
_timestampMode = timestampMode;
}
/// <summary>
/// Adds a flow (output destination) to this logger
/// </summary>
public EonaCatLogStack AddFlow(IFlow flow)
{
if (flow == null)
{
throw new ArgumentNullException(nameof(flow));
}
lock (_flows) { _flows.Add(flow); }
_concurrentFlows.Add(flow);
return this;
}
/// <summary>
/// Adds a booster to this logger
/// </summary>
public EonaCatLogStack AddBooster(IBooster booster)
{
if (booster == null)
{
throw new ArgumentNullException(nameof(booster));
}
lock (_boosters) { _boosters.Add(booster); }
return this;
}
/// <summary>
/// Removes a flow by name
/// </summary>
public EonaCatLogStack RemoveFlow(string name)
{
lock (_flows) { _flows.RemoveAll(f => f.Name == name); }
return this;
}
/// <summary>
/// Removes a booster by name
/// </summary>
public EonaCatLogStack RemoveBooster(string name)
{
lock (_boosters) { _boosters.RemoveAll(b => b.Name == name); }
return this;
}
/// <summary>
/// Adds a modifier to run before building the LogEvent.
/// Return false to cancel logging.
/// </summary>
public EonaCatLogStack AddModifier(ActionRef<LogEventBuilder> modifier)
{
if (modifier == null)
{
throw new ArgumentNullException(nameof(modifier));
}
lock (_modifiersLock) { _modifiers.Add(modifier); }
return this;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Log(string message, LogLevel level = LogLevel.Information)
{
if (_isDisposed || level < _minimumLevel)
{
return;
}
var builder = new LogEventBuilder()
.WithLevel(level)
.WithCategory(_category)
.WithMessage(message)
.WithTimestamp(GetTimestamp());
ProcessLogEvent(ref builder);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Log(LogLevel level, Exception exception, string message)
{
if (_isDisposed || level < _minimumLevel)
{
return;
}
var builder = new LogEventBuilder()
.WithLevel(level)
.WithCategory(_category)
.WithMessage(message)
.WithException(exception)
.WithTimestamp(GetTimestamp());
ProcessLogEvent(ref builder);
OnLog?.Invoke(this, new LogMessage
{
Level = level,
Exception = exception,
Message = message,
Category = _category,
Origin = null
});
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Log(LogLevel level, string message, params (string Key, object Value)[] properties)
{
if (_isDisposed || level < _minimumLevel)
{
return;
}
var builder = new LogEventBuilder()
.WithLevel(level)
.WithCategory(_category)
.WithMessage(message)
.WithTimestamp(GetTimestamp());
foreach (var (key, value) in properties)
{
builder.WithProperty(key, value);
}
ProcessLogEvent(ref builder);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Trace(string message) => Log(LogLevel.Trace, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Debug(string message) => Log(LogLevel.Debug, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Information(string message) => Log(LogLevel.Information, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Warning(string message) => Log(LogLevel.Warning, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Warning(Exception ex, string message) => Log(LogLevel.Warning, ex, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Error(string message) => Log(LogLevel.Error, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Error(Exception ex, string message) => Log(LogLevel.Error, ex, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Critical(string message) => Log(LogLevel.Critical, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)] public void Critical(Exception ex, string message) => Log(LogLevel.Critical, ex, message);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessLogEvent(ref LogEventBuilder builder)
{
// Apply boosters
lock (_boosters)
{
foreach (var booster in _boosters)
{
try
{
if (!booster.Boost(ref builder))
{
return; // filtered out
}
}
catch { }
}
}
// Apply modifiers
foreach (var mod in _modifiers)
{
try
{
mod(ref builder);
}
catch { }
}
var logEvent = builder.Build();
Interlocked.Increment(ref _totalLoggedCount);
// Blast to flows
foreach (var flow in _concurrentFlows)
{
try
{
var result = flow.BlastAsync(logEvent).GetAwaiter().GetResult();
if (result == WriteResult.Dropped)
{
Interlocked.Increment(ref _totalDroppedCount);
}
}
catch { }
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private long GetTimestamp()
{
switch (_timestampMode)
{
case TimestampMode.Local: return DateTime.Now.Ticks;
case TimestampMode.HighPrecision: return System.Diagnostics.Stopwatch.GetTimestamp();
default: return DateTime.UtcNow.Ticks;
}
}
public async Task FlushAsync(CancellationToken cancellationToken = default)
{
var tasks = _concurrentFlows.Select(f => f.FlushAsync(cancellationToken));
await Task.WhenAll(tasks).ConfigureAwait(false);
}
public LoggerDiagnostics GetDiagnostics()
{
var flowDiagnostics = _concurrentFlows
.Select(f => f is FlowBase fb ? fb.GetDiagnostics() : null)
.Where(d => d != null)
.ToList();
return new LoggerDiagnostics
{
Category = _category,
MinimumLevel = _minimumLevel,
TotalLogged = Interlocked.Read(ref _totalLoggedCount),
TotalDropped = Interlocked.Read(ref _totalDroppedCount),
FlowCount = _flows.Count,
BoosterCount = _boosters.Count,
Flows = flowDiagnostics
};
}
public async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
await FlushAsync().ConfigureAwait(false);
var disposeTasks = _concurrentFlows.Select(f => f.DisposeAsync().AsTask());
await Task.WhenAll(disposeTasks).ConfigureAwait(false);
GC.SuppressFinalize(this);
}
}
}

View File

@@ -0,0 +1,22 @@
using EonaCat.LogStack.Core;
using System;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Base class for boosters that need configuration.
/// </summary>
public abstract class BoosterBase : IBooster
{
    /// <summary>
    /// Creates the booster with its identifying name.
    /// </summary>
    /// <param name="name">Booster name used for removal/diagnostics; must not be null.</param>
    protected BoosterBase(string name)
    {
        if (name == null)
        {
            throw new ArgumentNullException(nameof(name));
        }

        Name = name;
    }

    /// <summary>
    /// The booster's name.
    /// </summary>
    public string Name { get; }

    /// <summary>
    /// Enriches or filters the pending log event; returning false drops the event.
    /// </summary>
    public abstract bool Boost(ref LogEventBuilder builder);
}

View File

@@ -0,0 +1,25 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
namespace EonaCat.LogStack.Boosters
{
public sealed class AppBooster : BoosterBase
{
private static readonly string AppName = AppDomain.CurrentDomain.FriendlyName;
private static readonly string AppBase = AppDomain.CurrentDomain.BaseDirectory;
public AppBooster() : base("App") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("App", AppName);
builder.WithProperty("AppBase", AppBase);
return true;
}
}
}

View File

@@ -0,0 +1,34 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Adds application name and (optionally) version to log events.
/// </summary>
public sealed class ApplicationBooster : BoosterBase
{
    private readonly string _appName;
    private readonly string? _appVersion;

    /// <param name="applicationName">Value for the "Application" property; must not be null.</param>
    /// <param name="version">Optional value for the "Version" property.</param>
    public ApplicationBooster(string applicationName, string? version = null) : base("Application")
    {
        if (applicationName == null)
        {
            throw new ArgumentNullException(nameof(applicationName));
        }

        _appName = applicationName;
        _appVersion = version;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override bool Boost(ref LogEventBuilder builder)
    {
        builder.WithProperty("Application", _appName);
        if (_appVersion != null)
        {
            builder.WithProperty("Version", _appVersion);
        }
        return true;
    }
}

View File

@@ -0,0 +1,41 @@
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Adds custom properties produced by a user-supplied callback.
/// </summary>
public sealed class CallbackBooster : BoosterBase
{
    private readonly Func<Dictionary<string, object?>> _callback;

    /// <param name="name">Booster name.</param>
    /// <param name="propertiesCallback">Invoked per event; its entries become event properties.</param>
    public CallbackBooster(string name, Func<Dictionary<string, object?>> propertiesCallback) : base(name)
    {
        _callback = propertiesCallback ?? throw new ArgumentNullException(nameof(propertiesCallback));
    }

    public override bool Boost(ref LogEventBuilder builder)
    {
        try
        {
            var extra = _callback();
            if (extra != null)
            {
                foreach (var pair in extra)
                {
                    builder.WithProperty(pair.Key, pair.Value);
                }
            }
        }
        catch
        {
            // Swallow: a faulty callback must never break the logging pipeline.
        }
        return true;
    }
}

View File

@@ -0,0 +1,27 @@
using EonaCat.LogStack.Core;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Adds a "CorrelationId" property taken from <see cref="Activity.Current"/>
/// (the activity Id, or the TraceId when no Id is set). No-op without an activity.
/// </summary>
public sealed class CorrelationIdBooster : BoosterBase
{
    public CorrelationIdBooster() : base("CorrelationId")
    {
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override bool Boost(ref LogEventBuilder builder)
    {
        var current = Activity.Current;
        if (current == null)
        {
            return true;
        }

        builder.WithProperty("CorrelationId", current.Id ?? current.TraceId.ToString());
        return true;
    }
}

View File

@@ -0,0 +1,37 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Adds a custom text property to log events
/// </summary>
public sealed class CustomTextBooster : BoosterBase
{
private readonly string _propertyName;
private readonly string _text;
/// <summary>
/// Creates a new booster that adds a custom text property to logs
/// </summary>
/// <param name="propertyName">The name of the property to add</param>
/// <param name="text">The text value to set</param>
public CustomTextBooster(string propertyName, string text)
: base("CustomText")
{
_propertyName = propertyName ?? throw new ArgumentNullException(nameof(propertyName));
_text = text ?? throw new ArgumentNullException(nameof(text));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty(_propertyName, _text);
return true;
}
}
}

View File

@@ -0,0 +1,21 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class DateBooster : BoosterBase
{
public DateBooster() : base("Date") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("Date", DateTime.UtcNow.ToString("yyyy-MM-dd"));
return true;
}
}
}

View File

@@ -0,0 +1,27 @@
using EonaCat.LogStack.Core;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Adds an "Environment" property to log events; defaults to "Production"
/// when no name is supplied.
/// </summary>
public sealed class EnvironmentBooster : BoosterBase
{
    private readonly string _name;

    /// <param name="environmentName">Environment name; null falls back to "Production".</param>
    public EnvironmentBooster(string environmentName) : base("Environment")
    {
        _name = environmentName == null ? "Production" : environmentName;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override bool Boost(ref LogEventBuilder builder)
    {
        builder.WithProperty("Environment", _name);
        return true;
    }
}

View File

@@ -0,0 +1,23 @@
using EonaCat.LogStack.Core;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class FrameworkBooster : BoosterBase
{
private static readonly string FrameworkDesc = RuntimeInformation.FrameworkDescription;
public FrameworkBooster() : base("Framework") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("Framework", FrameworkDesc);
return true;
}
}
}

View File

@@ -0,0 +1,28 @@
using EonaCat.LogStack.Core;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Placeholder booster for level-based filtering.
/// </summary>
public sealed class LevelFilterBooster : BoosterBase
{
    // NOTE(review): this field is stored but never read; the actual level
    // filtering is performed by the logger pipeline, so Boost is a no-op.
    private readonly LogLevel _minimumLevel;

    /// <param name="minimumLevel">Intended minimum level (currently unused here).</param>
    public LevelFilterBooster(LogLevel minimumLevel) : base("LevelFilter")
    {
        _minimumLevel = minimumLevel;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override bool Boost(ref LogEventBuilder builder)
    {
        // Intentionally passes every event through; see note above.
        return true;
    }
}

View File

@@ -0,0 +1,25 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Adds a "MachineName" property (captured once at type initialization).
/// </summary>
public sealed class MachineNameBooster : BoosterBase
{
    private static readonly string CachedMachineName = Environment.MachineName;

    public MachineNameBooster() : base("MachineName")
    {
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override bool Boost(ref LogEventBuilder builder)
    {
        builder.WithProperty("MachineName", CachedMachineName);
        return true;
    }
}

View File

@@ -0,0 +1,22 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class MemoryBooster : BoosterBase
{
public MemoryBooster() : base("Memory") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
var memoryMB = GC.GetTotalMemory(false) / 1024 / 1024;
builder.WithProperty("Memory", memoryMB);
return true;
}
}
}

View File

@@ -0,0 +1,23 @@
using EonaCat.LogStack.Core;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class OSBooster : BoosterBase
{
private static readonly string OSDesc = RuntimeInformation.OSDescription;
public OSBooster() : base("OS") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("OS", OSDesc);
return true;
}
}
}

View File

@@ -0,0 +1,24 @@
using EonaCat.LogStack.Core;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class ProcStartBooster : BoosterBase
{
private static readonly DateTime ProcessStart = Process.GetCurrentProcess().StartTime;
public ProcStartBooster() : base("ProcStart") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("ProcStart", ProcessStart);
return true;
}
}
}

View File

@@ -0,0 +1,25 @@
using EonaCat.LogStack.Core;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Adds a "ProcessId" property (captured once at type initialization).
/// </summary>
public sealed class ProcessIdBooster : BoosterBase
{
    // Process.GetCurrentProcess() returns an IDisposable handle; the previous
    // field initializer leaked it. Capture the Id and dispose the handle.
    private static readonly int CachedProcessId = ReadProcessId();

    private static int ReadProcessId()
    {
        using (var current = Process.GetCurrentProcess())
        {
            return current.Id;
        }
    }

    public ProcessIdBooster() : base("ProcessId") { }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override bool Boost(ref LogEventBuilder builder)
    {
        builder.WithProperty("ProcessId", CachedProcessId);
        return true;
    }
}

View File

@@ -0,0 +1,21 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class ThreadIdBooster : BoosterBase
{
public ThreadIdBooster() : base("ThreadId") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("ThreadId", Environment.CurrentManagedThreadId);
return true;
}
}
}

View File

@@ -0,0 +1,21 @@
using EonaCat.LogStack.Core;
using System.Runtime.CompilerServices;
using System.Threading;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class ThreadNameBooster : BoosterBase
{
public ThreadNameBooster() : base("ThreadName") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("ThreadName", Thread.CurrentThread.Name ?? "n/a");
return true;
}
}
}

View File

@@ -0,0 +1,21 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class TicksBooster : BoosterBase
{
public TicksBooster() : base("Ticks") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("Ticks", DateTime.UtcNow.Ticks);
return true;
}
}
}

View File

@@ -0,0 +1,21 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class TimeBooster : BoosterBase
{
public TimeBooster() : base("Time") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("Time", DateTime.UtcNow.ToString("HH:mm:ss.fff"));
return true;
}
}
}

View File

@@ -0,0 +1,36 @@
using EonaCat.LogStack.Core;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Overrides the event timestamp according to the configured mode:
/// local ticks, UTC ticks, or the Stopwatch counter for HighPrecision.
/// </summary>
public sealed class TimestampBooster : BoosterBase
{
    private readonly TimestampMode _mode;

    /// <param name="mode">Timestamp source; defaults to UTC ticks.</param>
    public TimestampBooster(TimestampMode mode = TimestampMode.Utc) : base("Timestamp")
    {
        _mode = mode;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override bool Boost(ref LogEventBuilder builder)
    {
        long ticks;
        switch (_mode)
        {
            case TimestampMode.Local:
                ticks = DateTime.Now.Ticks;
                break;
            case TimestampMode.HighPrecision:
                // Monotonic counter; not an epoch-based tick value.
                ticks = Stopwatch.GetTimestamp();
                break;
            default:
                ticks = DateTime.UtcNow.Ticks;
                break;
        }
        builder.WithTimestamp(ticks);
        return true;
    }
}

View File

@@ -0,0 +1,25 @@
using EonaCat.LogStack.Core;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class UptimeBooster : BoosterBase
{
private static readonly DateTime ProcessStart = Process.GetCurrentProcess().StartTime;
public UptimeBooster() : base("Uptime") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
var uptime = (DateTime.Now - ProcessStart).TotalSeconds;
builder.WithProperty("Uptime", uptime);
return true;
}
}
}

View File

@@ -0,0 +1,23 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Boosters
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class UserBooster : BoosterBase
{
private static readonly string UserName = Environment.UserName;
public UserBooster() : base("User") { }
[System.Runtime.CompilerServices.MethodImpl(MethodImplOptions.AggressiveInlining)]
public override bool Boost(ref LogEventBuilder builder)
{
builder.WithProperty("User", UserName);
return true;
}
}
}

View File

@@ -0,0 +1,74 @@
using System;
namespace EonaCat.LogStack;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Colors to use when writing to the console.
/// </summary>
/// <remarks>
/// NOTE(review): there are no entries for Alert/Emergency even though ESeverity
/// defines those levels, and it is unclear here which severities map to the
/// Traffic color — confirm against the console flow that consumes this schema.
/// </remarks>
public class ColorSchema
{
    /// <summary>
    /// The color to use for critical messages.
    /// </summary>
    public ColorScheme Critical = new(ConsoleColor.DarkRed, ConsoleColor.Black);
    /// <summary>
    /// The color to use for debug messages.
    /// </summary>
    public ColorScheme Debug = new(ConsoleColor.Green, ConsoleColor.Black);
    /// <summary>
    /// The color to use for error messages.
    /// </summary>
    public ColorScheme Error = new(ConsoleColor.Red, ConsoleColor.Black);
    /// <summary>
    /// The color to use for informational messages.
    /// </summary>
    public ColorScheme Info = new(ConsoleColor.Blue, ConsoleColor.Black);
    /// <summary>
    /// The color to use for trace messages.
    /// </summary>
    public ColorScheme Trace = new(ConsoleColor.Cyan, ConsoleColor.Black);
    /// <summary>
    /// The color to use for traffic messages.
    /// </summary>
    public ColorScheme Traffic = new(ConsoleColor.DarkMagenta, ConsoleColor.Black);
    /// <summary>
    /// The color to use for warning messages.
    /// </summary>
    public ColorScheme Warning = new(ConsoleColor.DarkYellow, ConsoleColor.Black);
}
/// <summary>
/// Color scheme for logging messages.
/// </summary>
public class ColorScheme
{
    /// <summary>
    /// Background color.
    /// </summary>
    public ConsoleColor Background;
    /// <summary>
    /// Foreground color.
    /// </summary>
    public ConsoleColor Foreground;
    /// <summary>
    /// Instantiates a new color scheme.
    /// </summary>
    /// <param name="foreground">Foreground color.</param>
    /// <param name="background">Background color.</param>
    public ColorScheme(ConsoleColor foreground, ConsoleColor background)
    {
        // Both fields are always assigned here, so the previous field
        // initializers (which read Console.ForegroundColor/BackgroundColor
        // at construction time only to be overwritten) were removed.
        Foreground = foreground;
        Background = background;
    }
}

View File

@@ -0,0 +1,11 @@
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
namespace EonaCat.LogStack.EonaCatLogStackCore
{
    /// <summary>
    /// Compression applied to written log output: none, or GZip.
    /// </summary>
    public enum CompressionFormat
    {
        None,
        GZip,
    }
}

View File

@@ -0,0 +1,59 @@
namespace EonaCat.LogStack.Core;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Defines the severity level of log entries. Values ascend with severity,
/// so level comparisons (e.g. "level &lt; minimumLevel") are meaningful.
/// </summary>
public enum LogLevel : byte
{
    None = 0,        // no level / logging effectively disabled
    Trace = 1,       // most verbose
    Debug = 2,
    Information = 3,
    Warning = 4,
    Error = 5,
    Critical = 6,    // most severe
}
/// <summary>
/// Result of a log write operation.
/// </summary>
public enum WriteResult : byte
{
    Success = 0,        // event accepted by the flow
    Dropped = 1,        // event discarded (e.g. backpressure)
    Failed = 2,         // write attempted but failed
    FlowDisabled = 3,   // flow is disabled
    LevelFiltered = 4,  // event below the flow's minimum level
    NoBlastZone = 5     // no destination available to write to
}
/// <summary>
/// Strategy for handling backpressure in flows.
/// </summary>
/// <remarks>
/// NOTE(review): the descriptions of Wait and Block read almost identically —
/// confirm the intended behavioral difference in the flow implementations.
/// </remarks>
public enum BackpressureStrategy : byte
{
    /// <summary>Wait for capacity to become available</summary>
    Wait = 0,
    /// <summary>Drop the newest incoming message</summary>
    DropNewest = 1,
    /// <summary>Drop the oldest message in the queue</summary>
    DropOldest = 2,
    /// <summary>Block until space is available (may impact performance)</summary>
    Block = 3
}
/// <summary>
/// Options for timestamp generation.
/// </summary>
public enum TimestampMode : byte
{
    Utc = 0,            // DateTime.UtcNow.Ticks
    Local = 1,          // DateTime.Now.Ticks
    HighPrecision = 2   // Stopwatch.GetTimestamp(): monotonic, NOT epoch-based
}

View File

@@ -0,0 +1,14 @@
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
namespace EonaCat.LogStack.EonaCatLogStackCore
{
    /// <summary>
    /// On-disk serialization format used by file-based log output.
    /// </summary>
    public enum FileOutputFormat
    {
        Text,
        Json,
        Xml,
        Csv, // RFC-4180 CSV
        StructuredJson, // Machine-readable JSON with correlation IDs
    }
}

View File

@@ -0,0 +1,400 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.EonaCatLogStackCore;
using EonaCat.LogStack.Flows;
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
    /// <summary>
    /// Audit log severity filter — only events at or above the selected band
    /// are written to the audit trail.
    /// </summary>
    public enum AuditLevel
    {
        All,
        WarningAndAbove,
        ErrorAndAbove,
        CriticalOnly,
    }
/// <summary>
/// A tamper-evident, append-only audit flow.
///
/// Each entry is written as:
/// SEQ|ISO-TIMESTAMP|LEVEL|CATEGORY|MESSAGE|PROPS|HASH
///
/// Where HASH = SHA-256( previousHash + currentLineWithoutHash ).
/// This creates a hash-chain so any deletion or modification of a past
/// entry invalidates all subsequent hashes, making tampering detectable.
///
/// The file is opened with FileShare.Read only (no concurrent writers).
/// The flow is synchronous-by-design: audit entries must land on disk
/// before the method returns, so <see cref="BlastAsync"/> blocks until
/// the entry is flushed.
/// </summary>
public sealed class AuditFlow : FlowBase
{
private const string Delimiter = "|";   // field separator within an audit line
private const int HashLength = 64; // hex SHA-256

private readonly string _filePath;        // full path of today's audit file
private readonly AuditLevel _auditLevel;  // severity filter for the audit trail
private readonly bool _includeProperties; // append event properties to each line
private readonly object _writeLock = new object(); // serializes writes, flushes and disposal
private readonly FileStream _stream;      // exclusive append stream (WriteThrough)
private readonly StreamWriter _writer;    // AutoFlush writer over _stream
private long _sequence;       // last written sequence number (continues across restarts)
private string _previousHash; // hash of the previously written line (chain state)
private long _totalEntries;   // entries written in this session

/// <summary>
/// Opens (or continues) today's audit file and restores the hash-chain state
/// from the last line already present, so the chain survives process restarts.
/// </summary>
/// <param name="directory">Target directory; a "./" prefix is resolved against the app base directory.</param>
/// <param name="filePrefix">File name prefix; the machine name and date are appended.</param>
/// <param name="auditLevel">Severity filter for the audit trail.</param>
/// <param name="minimumLevel">Minimum log level passed to the flow base.</param>
/// <param name="includeProperties">Whether event properties are appended to each line.</param>
public AuditFlow(
    string directory,
    string filePrefix = "audit",
    AuditLevel auditLevel = AuditLevel.All,
    LogLevel minimumLevel = LogLevel.Trace,
    bool includeProperties = true)
    : base("Audit:" + directory, minimumLevel)
{
    if (directory == null)
    {
        throw new ArgumentNullException(nameof(directory));
    }
    if (filePrefix == null)
    {
        throw new ArgumentNullException(nameof(filePrefix));
    }
    _auditLevel = auditLevel;
    _includeProperties = includeProperties;
    // Resolve relative path
    if (directory.StartsWith("./", StringComparison.Ordinal))
    {
        directory = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, directory.Substring(2));
    }
    Directory.CreateDirectory(directory);
    // One file per day, named with date stamp.
    // NOTE(review): the date is fixed at construction time — a long-lived flow
    // keeps writing to the same file after midnight; confirm whether rotation
    // is expected to be handled elsewhere.
    string date = DateTime.UtcNow.ToString("yyyyMMdd");
    _filePath = Path.Combine(directory, $"{filePrefix}_{Environment.MachineName}_{date}.audit");
    // Exclusive write access
    _stream = new FileStream(
        _filePath,
        FileMode.Append,
        FileAccess.Write,
        FileShare.Read, // allow external readers, but no other writers
        bufferSize: 4096,
        FileOptions.WriteThrough); // WriteThrough = no OS cache, hits disk immediately
    _writer = new StreamWriter(_stream, Encoding.UTF8) { AutoFlush = true };
    // Derive starting hash from the last line already in the file (for continuity)
    _previousHash = ReadLastHash(directory, filePrefix, date);
    _sequence = CountExistingLines(_filePath);
}

/// <summary>Path to the current audit file.</summary>
public string FilePath => _filePath;

/// <summary>Total entries written in this session.</summary>
public long TotalEntries => Interlocked.Read(ref _totalEntries);
/// <summary>
/// Verify the integrity of the audit file by replaying the hash chain.
/// Returns (true, null) if intact, (false, reason) if tampered.
/// </summary>
public static (bool ok, string reason) Verify(string filePath)
{
    if (!File.Exists(filePath))
    {
        return (false, "File not found.");
    }
    // Genesis hash: 64 zeros, matching the state a fresh AuditFlow starts with.
    string previousHash = new string('0', HashLength);
    long expectedSeq = 1;
    foreach (string raw in File.ReadLines(filePath, Encoding.UTF8))
    {
        // Blank lines and '#' comment lines are not part of the chain.
        if (string.IsNullOrWhiteSpace(raw) || raw.StartsWith("#"))
        {
            continue;
        }
        // The hash is the text after the LAST pipe. Escaped pipes ("\|") in the
        // body still contain '|', but the hex hash never does, so the final
        // pipe is always the hash delimiter.
        int lastPipe = raw.LastIndexOf(Delimiter, StringComparison.Ordinal);
        if (lastPipe < 0)
        {
            return (false, $"Malformed line (no delimiter): {Truncate(raw, 120)}");
        }
        string body = raw.Substring(0, lastPipe);
        string storedHash = raw.Substring(lastPipe + 1).Trim();
        if (storedHash.Length != HashLength)
        {
            return (false, $"Bad hash length on line {expectedSeq}: '{storedHash}'");
        }
        // Recompute the chained hash; any change to an earlier line cascades here.
        string computedHash = ComputeHash(previousHash, body);
        if (!string.Equals(storedHash, computedHash, StringComparison.OrdinalIgnoreCase))
        {
            return (false, $"Hash mismatch on sequence {expectedSeq}. " +
                $"Expected {computedHash}, found {storedHash}. " +
                $"Entry may have been tampered with.");
        }
        // Verify sequence number (first field); an unparsable field is skipped.
        int firstPipe = body.IndexOf(Delimiter, StringComparison.Ordinal);
        if (firstPipe > 0)
        {
            string seqStr = body.Substring(0, firstPipe);
            if (long.TryParse(seqStr, out long seq) && seq != expectedSeq)
            {
                return (false, $"Sequence gap: expected {expectedSeq}, found {seq}.");
            }
        }
        previousHash = computedHash;
        expectedSeq++;
    }
    return (true, null);
}
/// <summary>
/// Writes one event to the audit trail synchronously: the entry is flushed to
/// disk inside <see cref="WriteEntry"/> before the returned (completed) task
/// is observed.
/// </summary>
/// <returns>
/// <see cref="WriteResult.FlowDisabled"/> when the flow is disabled,
/// <see cref="WriteResult.LevelFiltered"/> when the event fails the level or
/// audit filters, <see cref="WriteResult.Failed"/> when the disk write throws,
/// otherwise <see cref="WriteResult.Success"/>.
/// </returns>
public override Task<WriteResult> BlastAsync(
    LogEvent logEvent,
    CancellationToken cancellationToken = default)
{
    // Fix: report FlowDisabled for a disabled flow; the original conflated it
    // with LevelFiltered, inconsistent with BlastBatchAsync.
    if (!IsEnabled)
    {
        return Task.FromResult(WriteResult.FlowDisabled);
    }
    if (!IsLogLevelEnabled(logEvent) || !PassesAuditLevel(logEvent.Level))
    {
        return Task.FromResult(WriteResult.LevelFiltered);
    }
    try
    {
        WriteEntry(logEvent);
    }
    catch (IOException)
    {
        // A disk failure should surface as a Failed result, not as an
        // exception thrown out of a logging call.
        return Task.FromResult(WriteResult.Failed);
    }
    catch (ObjectDisposedException)
    {
        // Stream already disposed during shutdown.
        return Task.FromResult(WriteResult.Failed);
    }
    return Task.FromResult(WriteResult.Success);
}
/// <summary>
/// Writes a batch of events, applying the same level and audit filters as the
/// single-event path. Entries land on disk before the task is observed.
/// </summary>
public override Task<WriteResult> BlastBatchAsync(
    ReadOnlyMemory<LogEvent> logEvents,
    CancellationToken cancellationToken = default)
{
    if (!IsEnabled)
    {
        return Task.FromResult(WriteResult.FlowDisabled);
    }
    // Fix: iterate the span directly — the original called ToArray(), copying
    // the entire batch just to enumerate it.
    foreach (var e in logEvents.Span)
    {
        if (IsLogLevelEnabled(e) && PassesAuditLevel(e.Level))
        {
            try
            {
                WriteEntry(e);
            }
            catch (IOException)
            {
                // Disk failure: report Failed instead of letting the
                // exception escape a logging call.
                return Task.FromResult(WriteResult.Failed);
            }
            catch (ObjectDisposedException)
            {
                return Task.FromResult(WriteResult.Failed);
            }
        }
    }
    return Task.FromResult(WriteResult.Success);
}
/// <summary>
/// Forces buffered data to disk. Mostly redundant because the writer has
/// AutoFlush and the stream uses WriteThrough, but Flush(flushToDisk: true)
/// additionally asks the OS to commit the data.
/// </summary>
public override Task FlushAsync(CancellationToken cancellationToken = default)
{
    lock (_writeLock)
    {
        _writer.Flush();
        _stream.Flush(flushToDisk: true);
    }
    return Task.CompletedTask;
}

/// <summary>
/// Disables the flow, performs a best-effort final flush and releases the
/// file handles. Exceptions are deliberately swallowed: disposing a logger
/// must never throw during shutdown.
/// </summary>
public override async ValueTask DisposeAsync()
{
    IsEnabled = false;
    // Safe to lock here: no await occurs inside the lock.
    lock (_writeLock)
    {
        try { _writer.Flush(); } catch { }
        try { _stream.Flush(true); } catch { }
        try { _writer.Dispose(); } catch { }
        try { _stream.Dispose(); } catch { }
    }
    await base.DisposeAsync().ConfigureAwait(false);
}
/// <summary>
/// Formats, hash-chains and writes one entry. Serialized by _writeLock; the
/// AutoFlush writer over a WriteThrough stream means the line is on disk when
/// this method returns.
/// Line layout: SEQ|ISO-TIMESTAMP|LEVEL|CATEGORY|MESSAGE[|EX=...][|props]|HASH.
/// </summary>
private void WriteEntry(LogEvent log)
{
    lock (_writeLock)
    {
        long seq = Interlocked.Increment(ref _sequence);
        var sb = new StringBuilder(256);
        sb.Append(seq);
        sb.Append(Delimiter);
        sb.Append(LogEvent.GetDateTime(log.Timestamp).ToString("O"));
        sb.Append(Delimiter);
        sb.Append(LevelString(log.Level));
        sb.Append(Delimiter);
        sb.Append(Escape(log.Category));
        sb.Append(Delimiter);
        sb.Append(Escape(log.Message.Length > 0 ? log.Message.ToString() : string.Empty));
        // Optional fields (exception, properties) make the field count
        // variable; Verify treats everything before the final pipe as body.
        if (log.Exception != null)
        {
            sb.Append(Delimiter);
            sb.Append("EX=");
            sb.Append(Escape(log.Exception.GetType().Name + ": " + log.Exception.Message));
        }
        if (_includeProperties && log.Properties.Count > 0)
        {
            sb.Append(Delimiter);
            bool first = true;
            foreach (var kv in log.Properties.ToArray())
            {
                if (!first)
                {
                    sb.Append(';');
                }
                first = false;
                sb.Append(Escape(kv.Key)).Append('=').Append(Escape(kv.Value?.ToString() ?? "null"));
            }
        }
        string body = sb.ToString();
        // Chain step: HASH = SHA-256(previousHash | body); see Verify().
        string hash = ComputeHash(_previousHash, body);
        string line = body + Delimiter + hash;
        _writer.WriteLine(line);
        // AutoFlush=true + WriteThrough stream = immediate disk write
        _previousHash = hash;
        Interlocked.Increment(ref _totalEntries);
        Interlocked.Increment(ref BlastedCount);
    }
}
/// <summary>Maps the configured <see cref="AuditLevel"/> filter onto a concrete level check.</summary>
private bool PassesAuditLevel(LogLevel level)
{
    switch (_auditLevel)
    {
        case AuditLevel.WarningAndAbove:
            return level >= LogLevel.Warning;
        case AuditLevel.ErrorAndAbove:
            return level >= LogLevel.Error;
        case AuditLevel.CriticalOnly:
            return level >= LogLevel.Critical;
        case AuditLevel.All:
        default:
            return true;
    }
}
/// <summary>Fixed uppercase token written into the audit line for each level.</summary>
private static string LevelString(LogLevel level)
{
    switch (level)
    {
        case LogLevel.Trace: return "TRACE";
        case LogLevel.Debug: return "DEBUG";
        case LogLevel.Information: return "INFO";
        case LogLevel.Warning: return "WARN";
        case LogLevel.Error: return "ERROR";
        case LogLevel.Critical: return "CRITICAL";
        default: return level.ToString().ToUpperInvariant();
    }
}
/// <summary>
/// Replaces pipe and newline characters inside field values so the "|"
/// delimiter and the one-line-per-entry layout stay unambiguous.
/// NOTE(review): backslashes themselves are not escaped, so a literal "\|"
/// in input is indistinguishable from an escaped pipe. The hash still covers
/// the escaped text (tamper-evidence is unaffected) but parsing is lossy —
/// confirm this is acceptable.
/// </summary>
private static string Escape(string value)
    => string.IsNullOrEmpty(value) ? string.Empty : value.Replace("|", "\\|").Replace("\r", "\\r").Replace("\n", "\\n");
/// <summary>
/// One hash-chain step: SHA-256 over "previousHash|body", returned as
/// lowercase hex (64 characters).
/// </summary>
/// <exception cref="ArgumentException">Either input is null or empty.</exception>
public static string ComputeHash(string previousHash, string body)
{
    if (string.IsNullOrEmpty(previousHash) || string.IsNullOrEmpty(body))
    {
        throw new ArgumentException("Input values cannot be null or empty.");
    }

    byte[] payload = Encoding.UTF8.GetBytes(previousHash + "|" + body);
    using (SHA256 sha = SHA256.Create())
    {
        byte[] digest = sha.ComputeHash(payload);
        var hex = new StringBuilder(digest.Length * 2);
        foreach (byte b in digest)
        {
            hex.Append(b.ToString("x2"));
        }
        return hex.ToString();
    }
}
/// <summary>
/// Reads the hash of the last chained line in today's file so a restarted
/// process continues the existing chain. Returns the 64-zero genesis hash
/// when the file does not exist or contains no entries.
/// </summary>
private static string ReadLastHash(string directory, string prefix, string date)
{
    string path = Path.Combine(directory, $"{prefix}_{Environment.MachineName}_{date}.audit");
    if (!File.Exists(path))
    {
        return new string('0', HashLength);
    }
    string lastLine = null;
    // Open file with FileShare.ReadWrite to allow reading while it's being written to
    using (var fileStream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var reader = new StreamReader(fileStream, Encoding.UTF8))
    {
        // Scan forward to the final non-blank, non-comment line.
        while (!reader.EndOfStream)
        {
            var line = reader.ReadLine();
            if (!string.IsNullOrWhiteSpace(line) && !line.StartsWith("#"))
            {
                lastLine = line;
            }
        }
    }
    if (lastLine == null)
    {
        return new string('0', HashLength);
    }
    // The hash is the text after the last pipe (the hex hash contains none).
    int lastPipe = lastLine.LastIndexOf(Delimiter, StringComparison.Ordinal);
    return lastPipe >= 0 ? lastLine.Substring(lastPipe + 1).Trim() : new string('0', HashLength);
}

/// <summary>
/// Counts existing chained lines so sequence numbering continues from the
/// previous run instead of restarting at 1.
/// </summary>
private static long CountExistingLines(string path)
{
    if (!File.Exists(path))
    {
        return 0;
    }
    long count = 0;
    // Open the file with FileShare.ReadWrite to allow concurrent read/write access
    using (var fileStream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var reader = new StreamReader(fileStream, Encoding.UTF8))
    {
        while (!reader.EndOfStream)
        {
            var line = reader.ReadLine();
            // Blank and '#' comment lines are not chain entries.
            if (!string.IsNullOrWhiteSpace(line) && !line.StartsWith("#"))
            {
                count++;
            }
        }
    }
    return count;
}
/// <summary>Shortens <paramref name="s"/> to at most <paramref name="max"/> characters, appending "..." when cut.</summary>
private static string Truncate(string s, int max)
{
    if (s.Length <= max)
    {
        return s;
    }
    return s.Substring(0, max) + "...";
}
}
}

View File

@@ -0,0 +1,285 @@
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// console flow with color support and minimal allocations
/// Uses a ColorSchema for configurable colors
/// </summary>
public sealed class ConsoleFlow : FlowBase
{
private readonly bool _useColors;              // emit console colors
private readonly TimestampMode _timestampMode; // controls the {tz} token text
private readonly StringBuilder _buffer = new(1024); // reused line buffer (guarded by _consoleLock)
private readonly object _consoleLock = new();  // serializes console output
private readonly ColorSchema _colors;          // per-level color configuration
private readonly string _template;             // raw output template
private List<Action<LogEvent, StringBuilder>> _compiledTemplate; // template compiled to append delegates

/// <summary>
/// Creates a console flow. The template is compiled once, up front, into a
/// list of append delegates so per-event formatting does no string parsing.
/// </summary>
/// <param name="minimumLevel">Minimum level handled by this flow.</param>
/// <param name="useColors">Whether to colorize output.</param>
/// <param name="timestampMode">Local vs UTC label for the {tz} token.</param>
/// <param name="colorSchema">Optional color overrides; defaults to a new <see cref="ColorSchema"/>.</param>
/// <param name="template">Output template; unknown tokens are echoed literally.</param>
public ConsoleFlow(
    LogLevel minimumLevel = LogLevel.Trace,
    bool useColors = true,
    TimestampMode timestampMode = TimestampMode.Local,
    ColorSchema? colorSchema = null,
    string template = "[{ts}] [{tz}] [Host: {host}] [Category: {category}] [Thread: {thread}] [{logtype}] {message}{props}")
    : base("Console", minimumLevel)
{
    _useColors = useColors;
    _timestampMode = timestampMode;
    _colors = colorSchema ?? new ColorSchema();
    _template = template ?? throw new ArgumentNullException(nameof(template));
    CompileTemplate(_template);
}
/// <summary>
/// Writes a single event to the console if the flow is enabled and the event
/// passes the level filter; always returns an already-completed task.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
{
    if (!IsEnabled)
    {
        return Task.FromResult(WriteResult.LevelFiltered);
    }
    if (!IsLogLevelEnabled(logEvent))
    {
        return Task.FromResult(WriteResult.LevelFiltered);
    }

    WriteToConsole(logEvent);
    Interlocked.Increment(ref BlastedCount);
    return Task.FromResult(WriteResult.Success);
}
/// <summary>
/// Writes every event in the batch that passes the level filter.
/// </summary>
public override Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
{
    if (!IsEnabled)
    {
        return Task.FromResult(WriteResult.FlowDisabled);
    }
    foreach (var logEvent in logEvents.Span)
    {
        // Fix: use IsLogLevelEnabled, like BlastAsync does. The original
        // compared logEvent.Level >= MinimumLevel directly, so batch writes
        // bypassed whatever extra filtering IsLogLevelEnabled applies.
        if (IsLogLevelEnabled(logEvent))
        {
            WriteToConsole(logEvent);
            Interlocked.Increment(ref BlastedCount);
        }
    }
    return Task.FromResult(WriteResult.Success);
}
/// <summary>
/// Renders one event through the compiled template and writes it under the
/// console lock. Note: the "logtype" token delegate may print part of the
/// line directly to the console (for mid-line coloring) and clear the shared
/// buffer; this method then prints whatever remains buffered.
/// </summary>
private void WriteToConsole(LogEvent logEvent)
{
    lock (_consoleLock)
    {
        _buffer.Clear();
        foreach (var action in _compiledTemplate)
        {
            action(logEvent, _buffer);
        }
        // Color the (rest of the) line by level, then print it.
        if (_useColors && TryGetColor(logEvent.Level, out var color))
        {
            Console.ForegroundColor = color.Foreground;
        }
        Console.WriteLine(_buffer.ToString());
        if (logEvent.Exception != null)
        {
            if (_useColors)
            {
                Console.ForegroundColor = ConsoleColor.DarkRed;
            }
            Console.WriteLine(logEvent.Exception.ToString());
            if (_useColors)
            {
                Console.ResetColor();
            }
        }
        if (_useColors)
        {
            Console.ResetColor();
        }
    }
}

/// <summary>
/// One-time template compiler: splits the template into literal runs and
/// {token} placeholders, building one append delegate per segment so the hot
/// path never parses strings. An unterminated '{' is emitted literally.
/// </summary>
private void CompileTemplate(string template)
{
    _compiledTemplate = new List<Action<LogEvent, StringBuilder>>();
    int pos = 0;
    while (pos < template.Length)
    {
        int open = template.IndexOf('{', pos);
        if (open < 0)
        {
            // No more tokens: the rest of the template is a literal.
            string lit = template.Substring(pos);
            _compiledTemplate.Add((_, sb) => sb.Append(lit));
            break;
        }
        if (open > pos)
        {
            // Literal text before the next token.
            string lit = template.Substring(pos, open - pos);
            _compiledTemplate.Add((_, sb) => sb.Append(lit));
        }
        int close = template.IndexOf('}', open);
        if (close < 0)
        {
            // Unterminated token: emit as-is.
            string lit = template.Substring(open);
            _compiledTemplate.Add((_, sb) => sb.Append(lit));
            break;
        }
        string token = template.Substring(open + 1, close - open - 1);
        _compiledTemplate.Add(ResolveToken(token));
        pos = close + 1;
    }
}
/// <summary>
/// Maps one template token to an append delegate. Unknown tokens are echoed
/// back literally (braces included) so template typos stay visible in output.
/// </summary>
private Action<LogEvent, StringBuilder> ResolveToken(string token)
{
    switch (token.ToLowerInvariant())
    {
        case "ts":
            return (log, sb) =>
                sb.Append(LogEvent.GetDateTime(log.Timestamp)
                    .ToString("yyyy-MM-dd HH:mm:ss.fff"));
        case "tz":
            return (_, sb) =>
                sb.Append(_timestampMode == TimestampMode.Local
                    ? TimeZoneInfo.Local.StandardName
                    : "UTC");
        case "host":
            return (_, sb) => sb.Append(Environment.MachineName);
        case "category":
            return (log, sb) =>
            {
                if (!string.IsNullOrEmpty(log.Category))
                {
                    sb.Append(log.Category);
                }
            };
        case "thread":
            return (_, sb) => sb.Append(Thread.CurrentThread.ManagedThreadId);
        case "pid":
        {
            // Fix: the original called Process.GetCurrentProcess() inside the
            // delegate, allocating an undisposed Process object per logged
            // event. The id never changes, so resolve it once at compile time.
            int pid;
            using (var process = Process.GetCurrentProcess())
            {
                pid = process.Id;
            }
            return (_, sb) => sb.Append(pid);
        }
        case "message":
            return (log, sb) => sb.Append(log.Message);
        case "props":
            return AppendProperties;
        case "newline":
            return (_, sb) => sb.AppendLine();
        case "logtype":
            return (log, sb) =>
            {
                var levelText = GetLevelText(log.Level);
                if (_useColors && TryGetColor(log.Level, out var color))
                {
                    // Mid-line coloring: write the buffered prefix and the
                    // level text directly to the console in the level color,
                    // then clear the buffer so WriteToConsole continues from
                    // this point.
                    Console.ForegroundColor = color.Foreground;
                    Console.BackgroundColor = color.Background;
                    Console.Write(sb.ToString());
                    Console.Write(levelText);
                    Console.ResetColor();
                    sb.Clear();
                }
                else
                {
                    sb.Append(levelText);
                }
            };
        default:
            return (_, sb) => sb.Append('{').Append(token).Append('}');
    }
}
/// <summary>
/// Appends the event's properties as " {key=value, key=value}"; appends
/// nothing when the event carries no properties. Null values print as "null".
/// </summary>
private void AppendProperties(LogEvent log, StringBuilder sb)
{
    if (log.Properties.Count == 0)
    {
        return;
    }

    sb.Append(" {");
    var separator = string.Empty;
    foreach (var pair in log.Properties)
    {
        sb.Append(separator);
        sb.Append(pair.Key);
        sb.Append('=');
        sb.Append(pair.Value?.ToString() ?? "null");
        separator = ", ";
    }
    sb.Append('}');
}
/// <summary>
/// Looks up the configured color pair for a level; unknown levels fall back
/// to the Info colors. Returns false only when the schema entry is null,
/// which disables coloring for that level.
/// NOTE(review): ColorScheme is declared elsewhere — the null check implies
/// it is a reference type (or nullable); confirm.
/// </summary>
private bool TryGetColor(LogLevel level, out ColorScheme color)
{
    color = level switch
    {
        LogLevel.Trace => _colors.Trace,
        LogLevel.Debug => _colors.Debug,
        LogLevel.Information => _colors.Info,
        LogLevel.Warning => _colors.Warning,
        LogLevel.Error => _colors.Error,
        LogLevel.Critical => _colors.Critical,
        _ => _colors.Info
    };
    return color != null;
}
/// <summary>Fixed display token for each level; "???" for unknown values.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static string GetLevelText(LogLevel level)
{
    switch (level)
    {
        case LogLevel.Trace: return "TRACE";
        case LogLevel.Debug: return "DEBUG";
        case LogLevel.Information: return "INFO";
        case LogLevel.Warning: return "WARN";
        case LogLevel.Error: return "ERROR";
        case LogLevel.Critical: return "CRITICAL";
        default: return "???";
    }
}
/// <summary>No-op: console output is written line-by-line and needs no flush.</summary>
public override Task FlushAsync(CancellationToken cancellationToken = default)
{
    // Console auto-flushes
    return Task.CompletedTask;
}
}
}

View File

@@ -0,0 +1,231 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// database flow with batched inserts for any ADO.NET database
/// </summary>
public sealed class DatabaseFlow : FlowBase
{
private const int ChannelCapacity = 4096; // bounded queue size (oldest events dropped when full)
private const int DefaultBatchSize = 128; // events per INSERT batch / transaction

private readonly Channel<LogEvent> _channel;   // producer/consumer hand-off queue
private readonly Task _writerTask;             // background batch writer
private readonly CancellationTokenSource _cts; // stops the writer on dispose
private readonly Func<DbConnection> _connectionFactory; // creates a fresh, unopened connection per batch
private readonly string _tableName;            // target table (interpolated into SQL — must be trusted)

/// <summary>
/// Creates a database flow that queues events and writes them in batches on
/// a background task.
/// </summary>
/// <param name="connectionFactory">Factory producing an unopened <see cref="DbConnection"/> per batch.</param>
/// <param name="tableName">
/// Target table name. NOTE(review): interpolated directly into SQL text — it
/// must come from trusted configuration, never from user input.
/// </param>
/// <param name="minimumLevel">Minimum level handled by this flow.</param>
public DatabaseFlow(
    Func<DbConnection> connectionFactory,
    string tableName = "Logs",
    LogLevel minimumLevel = LogLevel.Trace)
    : base($"Database:{tableName}", minimumLevel)
{
    _connectionFactory = connectionFactory ?? throw new ArgumentNullException(nameof(connectionFactory));
    _tableName = tableName;
    var channelOptions = new BoundedChannelOptions(ChannelCapacity)
    {
        FullMode = BoundedChannelFullMode.DropOldest,
        SingleReader = true,
        SingleWriter = false
    };
    _channel = Channel.CreateBounded<LogEvent>(channelOptions);
    _cts = new CancellationTokenSource();
    _writerTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
}
/// <summary>
/// Enqueues one event for the background batch writer; never blocks. With
/// the DropOldest channel policy, TryWrite fails mainly once the channel has
/// been completed (after flush/dispose), which is reported as Dropped.
/// </summary>
public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
{
    if (!IsEnabled || !IsLogLevelEnabled(logEvent))
    {
        return Task.FromResult(WriteResult.LevelFiltered);
    }
    if (_channel.Writer.TryWrite(logEvent))
    {
        Interlocked.Increment(ref BlastedCount);
        return Task.FromResult(WriteResult.Success);
    }
    Interlocked.Increment(ref DroppedCount);
    return Task.FromResult(WriteResult.Dropped);
}
/// <summary>
/// Completes the channel and waits for the background writer to drain every
/// queued event. NOTE: flushing is terminal for this flow — no further events
/// are accepted afterwards.
/// </summary>
public override async Task FlushAsync(CancellationToken cancellationToken = default)
{
    // Fix: TryComplete instead of Complete — Complete() throws
    // InvalidOperationException on a channel that is already completed, so a
    // second Flush (or a Dispose after Flush) would blow up. TryComplete is
    // idempotent.
    _channel.Writer.TryComplete();
    try
    {
        await _writerTask.ConfigureAwait(false);
    }
    catch (OperationCanceledException)
    {
        // Writer observed cancellation while draining — nothing left to flush.
    }
}
/// <summary>
/// Background loop: drains the channel into batches of up to DefaultBatchSize
/// and writes each batch in one transaction. Exits when the channel is
/// completed (flush/dispose) or the token is cancelled; any final partial
/// batch is written on the way out.
/// </summary>
private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
{
    var batch = new List<LogEvent>(DefaultBatchSize);
    try
    {
        while (await _channel.Reader.WaitToReadAsync(cancellationToken))
        {
            while (_channel.Reader.TryRead(out var logEvent))
            {
                batch.Add(logEvent);
                if (batch.Count >= DefaultBatchSize)
                {
                    await WriteBatchAsync(batch, cancellationToken).ConfigureAwait(false);
                    batch.Clear();
                }
            }
            // Queue momentarily empty: flush the partial batch now rather
            // than holding events until more arrive.
            if (batch.Count > 0)
            {
                await WriteBatchAsync(batch, cancellationToken).ConfigureAwait(false);
                batch.Clear();
            }
        }
        // Channel completed: write whatever is still buffered.
        if (batch.Count > 0)
        {
            await WriteBatchAsync(batch, cancellationToken).ConfigureAwait(false);
            batch.Clear();
        }
    }
    catch (OperationCanceledException) { }
    catch (Exception ex)
    {
        // Last-resort guard: a logging pipeline must not crash the process.
        Console.Error.WriteLine($"DatabaseFlow error: {ex.Message}");
    }
}
/// <summary>
/// Writes one batch as a single multi-statement command inside one
/// transaction on a fresh connection. The table name is interpolated into
/// the SQL (table names cannot be parameterized) and must be trusted; all
/// values travel as parameters.
/// </summary>
private async Task WriteBatchAsync(List<LogEvent> batch, CancellationToken cancellationToken)
{
    using var connection = _connectionFactory();
    await connection.OpenAsync(cancellationToken);
    using var transaction = connection.BeginTransaction();
    // Build a single SQL command with multiple inserts
    var sb = new StringBuilder();
    var parameters = new List<DbParameter>();
    int paramIndex = 0;
    foreach (var logEvent in batch)
    {
        sb.Append($"INSERT INTO {_tableName} (Timestamp, Level, Category, Message, ThreadId, Exception, Properties) VALUES (");
        // Timestamp (ISO-8601 round-trip text)
        var timestampParam = CreateParameter(connection, $"@p{paramIndex++}", LogEvent.GetDateTime(logEvent.Timestamp).ToString("O"));
        parameters.Add(timestampParam);
        sb.Append(timestampParam.ParameterName).Append(", ");
        // Level
        var levelParam = CreateParameter(connection, $"@p{paramIndex++}", logEvent.Level.ToString());
        parameters.Add(levelParam);
        sb.Append(levelParam.ParameterName).Append(", ");
        // Category
        var categoryParam = CreateParameter(connection, $"@p{paramIndex++}", logEvent.Category ?? string.Empty);
        parameters.Add(categoryParam);
        sb.Append(categoryParam.ParameterName).Append(", ");
        // Message
        var messageParam = CreateParameter(connection, $"@p{paramIndex++}", logEvent.Message.ToString());
        parameters.Add(messageParam);
        sb.Append(messageParam.ParameterName).Append(", ");
        // ThreadId
        var threadParam = CreateParameter(connection, $"@p{paramIndex++}", logEvent.ThreadId);
        parameters.Add(threadParam);
        sb.Append(threadParam.ParameterName).Append(", ");
        // Exception serialized as JSON, or SQL NULL when absent
        object exValue = logEvent.Exception != null
            ? JsonHelper.ToJson(new
            {
                type = logEvent.Exception.GetType().FullName,
                message = logEvent.Exception.Message,
                stackTrace = logEvent.Exception.StackTrace
            })
            : DBNull.Value;
        var exParam = CreateParameter(connection, $"@p{paramIndex++}", exValue);
        parameters.Add(exParam);
        sb.Append(exParam.ParameterName).Append(", ");
        // Properties serialized as JSON, or SQL NULL when empty
        object propsValue = logEvent.Properties.Count > 0
            ? JsonHelper.ToJson(logEvent.Properties)
            : DBNull.Value;
        var propsParam = CreateParameter(connection, $"@p{paramIndex++}", propsValue);
        parameters.Add(propsParam);
        sb.Append(propsParam.ParameterName).Append(");");
    }
    using var command = connection.CreateCommand();
    command.Transaction = transaction;
    command.CommandText = sb.ToString();
    foreach (var p in parameters)
    {
        command.Parameters.Add(p);
    }
    await command.ExecuteNonQueryAsync(cancellationToken);
    transaction.Commit();
}
/// <summary>
/// Copies a property memory into a dictionary; later duplicate keys win.
/// NOTE(review): not referenced anywhere in this file — confirm it is still needed.
/// </summary>
private static Dictionary<string, object?> ToDictionary(ReadOnlyMemory<KeyValuePair<string, object?>> properties)
{
    var result = new Dictionary<string, object?>(properties.Length);
    foreach (var pair in properties.Span)
    {
        result[pair.Key] = pair.Value;
    }
    return result;
}
/// <summary>
/// Creates a provider-specific parameter; null values map to
/// <see cref="DBNull.Value"/>.
/// </summary>
private static DbParameter CreateParameter(DbConnection connection, string name, object value)
{
    // Fix: a DbParameter can only be created through a DbCommand, and the
    // original leaked one undisposed DbCommand per parameter. The parameter
    // object is independent of its factory command, so dispose it right away.
    using var command = connection.CreateCommand();
    DbParameter p = command.CreateParameter();
    p.ParameterName = name;
    p.Value = value ?? DBNull.Value;
    return p;
}
/// <summary>
/// Disables the flow, stops the background writer and releases resources.
/// Exceptions from the draining writer are swallowed: disposal must not throw.
/// </summary>
public override async ValueTask DisposeAsync()
{
    IsEnabled = false;
    // Fix: TryComplete instead of Complete — Complete() throws
    // InvalidOperationException when FlushAsync already completed the writer,
    // which made dispose-after-flush blow up. TryComplete is idempotent.
    _channel.Writer.TryComplete();
    _cts.Cancel();
    try
    {
        await _writerTask.ConfigureAwait(false);
    }
    catch { }
    _cts.Dispose();
    await base.DisposeAsync().ConfigureAwait(false);
}
}
}

View File

@@ -0,0 +1,300 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.EonaCatLogStackCore;
using EonaCat.LogStack.Extensions;
using Microsoft.Extensions.Primitives;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Diagnostic counters snapshot emitted on a regular interval.
/// Setters are internal: instances are populated by DiagnosticsFlow.Capture.
/// </summary>
public sealed class DiagnosticsSnapshot
{
    /// <summary>UTC time the snapshot was taken.</summary>
    public DateTime CapturedAt { get; internal set; }
    /// <summary>Process CPU usage since the previous sample, as a percentage normalized by processor count.</summary>
    public double CpuPercent { get; internal set; }
    /// <summary>Process working set in bytes.</summary>
    public long WorkingSetBytes { get; internal set; }
    /// <summary>Cumulative gen-0 GC collection count.</summary>
    public long GcGen0 { get; internal set; }
    /// <summary>Cumulative gen-1 GC collection count.</summary>
    public long GcGen1 { get; internal set; }
    /// <summary>Cumulative gen-2 GC collection count.</summary>
    public long GcGen2 { get; internal set; }
    /// <summary>OS thread count of the process.</summary>
    public long ThreadCount { get; internal set; }
    /// <summary>OS handle count of the process.</summary>
    public long HandleCount { get; internal set; }
    /// <summary>Seconds since the owning flow was constructed.</summary>
    public double UptimeSeconds { get; internal set; }
    /// <summary>Custom metrics plus "counter.*" entries; null when neither exists.</summary>
    public Dictionary<string, object> Custom { get; internal set; }
}
/// <summary>
/// A flow that periodically captures process diagnostics (CPU, memory, GC, threads)
/// and writes them as structured log events. Also acts as a pass-through: every
/// normal log event optionally gets runtime metrics injected as properties.
///
/// Additionally exposes an in-process <see cref="Counter"/> registry so application
/// code can record business metrics (request count, error rate, etc.) that are
/// flushed alongside diagnostic snapshots.
/// </summary>
public sealed class DiagnosticsFlow : FlowBase
{
/// <summary>Thread-safe counter for business metrics.</summary>
public sealed class Counter
{
    private long _value;

    public Counter(string name) => Name = name;

    /// <summary>Counter name as registered with the flow.</summary>
    public string Name { get; }

    /// <summary>Current value (atomic read).</summary>
    public long Value => Interlocked.Read(ref _value);

    /// <summary>Atomically adds one.</summary>
    public void Increment() => Interlocked.Increment(ref _value);

    /// <summary>Atomically adds <paramref name="delta"/>.</summary>
    public void IncrementBy(long delta) => Interlocked.Add(ref _value, delta);

    /// <summary>Atomically resets the value to zero.</summary>
    public void Reset() => Interlocked.Exchange(ref _value, 0);
}
private readonly ConcurrentDictionary<string, Counter> _counters
    = new ConcurrentDictionary<string, Counter>(StringComparer.OrdinalIgnoreCase); // named business metrics (case-insensitive)
private readonly TimeSpan _snapshotInterval;  // delay between samples (defaults to 60s)
private readonly bool _injectIntoEvents;      // add diag.* properties to every event
private readonly bool _writeSnapshotEvents;   // emit a snapshot LogEvent each interval
private readonly string _snapshotCategory;    // category of emitted snapshot events
private readonly IFlow _forwardTo;            // target flow for snapshot events (may be null)
private readonly Func<Dictionary<string, object>> _customMetricsFactory; // optional user metrics callback
private readonly CancellationTokenSource _cts = new CancellationTokenSource(); // stops the sampler thread
private readonly Thread _samplerThread;       // dedicated background sampler
private readonly Stopwatch _uptime = Stopwatch.StartNew(); // measures flow uptime
private volatile DiagnosticsSnapshot _latest; // most recent snapshot (lock-free read)
private TimeSpan _lastCpuTime;                // process CPU time at the previous sample
private DateTime _lastCpuSample;              // wall clock at the previous sample
private readonly Process _proc;               // current process handle (refreshed each sample)

/// <summary>Most recent snapshot, or null before the first sample completes.</summary>
public DiagnosticsSnapshot LatestSnapshot { get { return _latest; } }

/// <summary>
/// Creates the diagnostics flow and starts its background sampler thread.
/// </summary>
/// <param name="snapshotInterval">Sampling interval; default(TimeSpan) means 60 seconds.</param>
/// <param name="injectIntoEvents">Inject "diag.*" properties into events passed to BlastAsync.</param>
/// <param name="writeSnapshotEvents">Emit a snapshot event each interval (requires <paramref name="forwardTo"/>).</param>
/// <param name="snapshotCategory">Category assigned to snapshot events.</param>
/// <param name="forwardTo">Flow receiving the snapshot events; null disables emission.</param>
/// <param name="minimumLevel">Minimum level handled by this flow.</param>
/// <param name="customMetrics">Optional callback supplying extra metrics per snapshot.</param>
public DiagnosticsFlow(
    TimeSpan snapshotInterval = default(TimeSpan),
    bool injectIntoEvents = false,
    bool writeSnapshotEvents = true,
    string snapshotCategory = "Diagnostics",
    IFlow forwardTo = null,
    LogLevel minimumLevel = LogLevel.Trace,
    Func<Dictionary<string, object>> customMetrics = null)
    : base("Diagnostics", minimumLevel)
{
    _snapshotInterval = snapshotInterval == default(TimeSpan)
        ? TimeSpan.FromSeconds(60)
        : snapshotInterval;
    _injectIntoEvents = injectIntoEvents;
    _writeSnapshotEvents = writeSnapshotEvents;
    _snapshotCategory = snapshotCategory ?? "Diagnostics";
    _forwardTo = forwardTo;
    _customMetricsFactory = customMetrics;
    // Baseline for the first CPU-usage delta.
    _proc = Process.GetCurrentProcess();
    _lastCpuTime = _proc.TotalProcessorTime;
    _lastCpuSample = DateTime.UtcNow;
    // Dedicated background thread; BelowNormal priority keeps sampling out of
    // the way of application work.
    _samplerThread = new Thread(SamplerLoop)
    {
        IsBackground = true,
        Name = "DiagnosticsFlow.Sampler",
        Priority = ThreadPriority.BelowNormal
    };
    _samplerThread.Start();
}
/// <summary>Gets or creates a named counter (case-insensitive lookup).</summary>
/// <param name="name">Counter name; must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="name"/> is null.</exception>
public Counter GetCounter(string name)
{
    if (name == null)
    {
        // Fix: use nameof instead of the bare string literal "name" so the
        // parameter reference survives renames.
        throw new ArgumentNullException(nameof(name));
    }
    return _counters.GetOrAdd(name, n => new Counter(n));
}
/// <summary>Current value of a named counter (0 if not yet created).</summary>
public long ReadCounter(string name)
    => _counters.TryGetValue(name, out Counter c) ? c.Value : 0;
/// <summary>
/// Pass-through handler: optionally injects the latest snapshot as "diag.*"
/// properties into the event (mutating its Properties collection), then
/// reports success. Events are not forwarded here — forwarding only happens
/// for interval snapshots via the _forwardTo flow.
/// </summary>
public override Task<WriteResult> BlastAsync(
    LogEvent logEvent,
    CancellationToken cancellationToken = default(CancellationToken))
{
    if (!IsEnabled || !IsLogLevelEnabled(logEvent))
    {
        return Task.FromResult(WriteResult.LevelFiltered);
    }
    if (_injectIntoEvents)
    {
        DiagnosticsSnapshot snap = _latest; // volatile read; null before first sample
        if (snap != null)
        {
            logEvent.Properties.TryAdd("diag.mem_mb",(snap.WorkingSetBytes / 1024 / 1024).ToString());
            logEvent.Properties.TryAdd("diag.cpu",snap.CpuPercent.ToString("F1"));
            logEvent.Properties.TryAdd("diag.threads",snap.ThreadCount.ToString());
        }
    }
    Interlocked.Increment(ref BlastedCount);
    return Task.FromResult(WriteResult.Success);
}
/// <summary>
/// Applies the per-event handling (metric injection) to every event in the
/// batch that passes the level filter.
/// </summary>
public override async Task<WriteResult> BlastBatchAsync(
    ReadOnlyMemory<LogEvent> logEvents,
    CancellationToken cancellationToken = default(CancellationToken))
{
    if (!IsEnabled)
    {
        return WriteResult.FlowDisabled;
    }
    foreach (LogEvent e in logEvents.ToArray())
    {
        if (IsLogLevelEnabled(e))
        {
            // Fix: await the per-event call. The original discarded the
            // returned task (fire-and-forget, compiler warning CS4014), which
            // would silently drop failures if BlastAsync ever became truly
            // asynchronous.
            await BlastAsync(e, cancellationToken).ConfigureAwait(false);
        }
    }
    return WriteResult.Success;
}
/// <summary>
/// No-op: this flow buffers nothing. Returns the cached
/// <see cref="Task.CompletedTask"/>, consistent with the other flows,
/// instead of Task.FromResult(0).
/// </summary>
public override Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
    => Task.CompletedTask;
/// <summary>
/// Disables the flow, cancels the sampler and waits up to 3 seconds for the
/// sampler thread to exit before disposing the token source.
/// </summary>
public override async ValueTask DisposeAsync()
{
    IsEnabled = false;
    _cts.Cancel();
    // Bounded join: never hang shutdown on a slow sampler iteration.
    _samplerThread.Join(TimeSpan.FromSeconds(3));
    _cts.Dispose();
    await base.DisposeAsync().ConfigureAwait(false);
}
/// <summary>
/// Background sampler loop: waits one interval, captures a snapshot, and
/// optionally forwards it as a log event. Errors are reported to stderr and
/// never kill the thread.
/// </summary>
private void SamplerLoop()
{
    while (!_cts.Token.IsCancellationRequested)
    {
        try
        {
            // Fix: cancellable wait instead of Thread.Sleep. Cancel() did not
            // interrupt Sleep, so DisposeAsync could stall its Join for up to
            // a full interval. WaitOne returns true as soon as the token's
            // wait handle is signalled by cancellation.
            if (_cts.Token.WaitHandle.WaitOne(_snapshotInterval))
            {
                break;
            }
            DiagnosticsSnapshot snap = Capture();
            _latest = snap;
            if (_writeSnapshotEvents && _forwardTo != null)
            {
                LogEvent ev = BuildSnapshotEvent(snap);
                _forwardTo.BlastAsync(ev).GetAwaiter().GetResult();
            }
        }
        catch (ThreadInterruptedException) { break; }
        catch (ObjectDisposedException) { break; } // token source disposed during shutdown
        catch (Exception ex)
        {
            Console.Error.WriteLine("[DiagnosticsFlow] Sampler error: " + ex.Message);
        }
    }
}
/// <summary>
/// Takes one diagnostics sample. CPU percent is the process CPU time consumed
/// since the previous sample divided by wall time and processor count.
/// Counter values are folded into the custom dictionary as "counter.&lt;name&gt;".
/// </summary>
private DiagnosticsSnapshot Capture()
{
    _proc.Refresh(); // discard cached process info before reading
    DateTime now = DateTime.UtcNow;
    TimeSpan cpuNow = _proc.TotalProcessorTime;
    double elapsed = (now - _lastCpuSample).TotalSeconds;
    double cpu = elapsed > 0
        ? (cpuNow - _lastCpuTime).TotalSeconds / elapsed / Environment.ProcessorCount * 100.0
        : 0;
    _lastCpuTime = cpuNow;
    _lastCpuSample = now;
    Dictionary<string, object> custom = null;
    if (_customMetricsFactory != null)
    {
        // Best effort: a faulty user callback must not kill the sampler.
        try { custom = _customMetricsFactory(); } catch { }
    }
    // Append counters to custom dict
    if (_counters.Count > 0)
    {
        if (custom == null)
        {
            custom = new Dictionary<string, object>(StringComparer.OrdinalIgnoreCase);
        }
        foreach (KeyValuePair<string, Counter> kv in _counters)
        {
            custom["counter." + kv.Key] = kv.Value.Value;
        }
    }
    return new DiagnosticsSnapshot
    {
        CapturedAt = now,
        CpuPercent = Math.Round(cpu, 2),
        WorkingSetBytes = _proc.WorkingSet64,
        GcGen0 = GC.CollectionCount(0),
        GcGen1 = GC.CollectionCount(1),
        GcGen2 = GC.CollectionCount(2),
        ThreadCount = _proc.Threads.Count,
        HandleCount = _proc.HandleCount,
        UptimeSeconds = _uptime.Elapsed.TotalSeconds,
        Custom = custom
    };
}

/// <summary>
/// Converts a snapshot into an Information-level LogEvent: a human-readable
/// summary message plus each metric as a string property (custom entries
/// included under their own keys).
/// </summary>
private LogEvent BuildSnapshotEvent(DiagnosticsSnapshot snap)
{
    var sb = new StringBuilder(256);
    sb.AppendFormat(
        "Diagnostics | CPU={0:F1}% Mem={1}MB GC=[{2},{3},{4}] Threads={5} Handles={6} Uptime={7:F0}s",
        snap.CpuPercent,
        snap.WorkingSetBytes / 1024 / 1024,
        snap.GcGen0, snap.GcGen1, snap.GcGen2,
        snap.ThreadCount,
        snap.HandleCount,
        snap.UptimeSeconds);
    var ev = new LogEvent
    {
        Level = LogLevel.Information,
        Category = _snapshotCategory,
        Message = new StringSegment(sb.ToString()),
        Timestamp = snap.CapturedAt.Ticks
    };
    ev.Properties.TryAdd("cpu_pct", snap.CpuPercent.ToString("F2"));
    ev.Properties.TryAdd("mem_bytes", snap.WorkingSetBytes.ToString());
    ev.Properties.TryAdd("gc_gen0", snap.GcGen0.ToString());
    ev.Properties.TryAdd("gc_gen1", snap.GcGen1.ToString());
    ev.Properties.TryAdd("gc_gen2", snap.GcGen2.ToString());
    ev.Properties.TryAdd("threads", snap.ThreadCount.ToString());
    ev.Properties.TryAdd("handles", snap.HandleCount.ToString());
    ev.Properties.TryAdd("uptime_s", snap.UptimeSeconds.ToString("F0"));
    if (snap.Custom != null)
    {
        foreach (KeyValuePair<string, object> kv in snap.Custom)
        {
            ev.Properties.TryAdd(kv.Key, kv.Value != null ? kv.Value.ToString() : "null");
        }
    }
    return ev;
}
}
}

View File

@@ -0,0 +1,197 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// logging flow that sends messages to a Discord channel via webhook.
/// </summary>
public sealed class DiscordFlow : FlowBase, IAsyncDisposable
{
private const int ChannelCapacity = 4096; // bounded queue size (oldest events dropped when full)
private const int DefaultBatchSize = 10;  // events posted per drain cycle

private readonly Channel<LogEvent> _channel;   // producer/consumer hand-off queue
private readonly Task _workerTask;             // background webhook poster
private readonly CancellationTokenSource _cts; // stops the worker
private readonly HttpClient _httpClient;       // single reused client for all webhook calls
private readonly string _webhookUrl;           // Discord webhook endpoint

/// <summary>
/// Creates a Discord flow that queues events and posts them to the webhook
/// on a background task.
/// </summary>
/// <param name="webhookUrl">Discord webhook URL; must not be null.</param>
/// <param name="botName">Username shown on the webhook posts.</param>
/// <param name="minimumLevel">Minimum level handled by this flow.</param>
public DiscordFlow(
    string webhookUrl,
    string botName,
    LogLevel minimumLevel = LogLevel.Information)
    : base("Discord", minimumLevel)
{
    _webhookUrl = webhookUrl ?? throw new ArgumentNullException(nameof(webhookUrl));
    _httpClient = new HttpClient();
    var channelOptions = new BoundedChannelOptions(ChannelCapacity)
    {
        FullMode = BoundedChannelFullMode.DropOldest,
        SingleReader = true,
        SingleWriter = false
    };
    _channel = Channel.CreateBounded<LogEvent>(channelOptions);
    _cts = new CancellationTokenSource();
    _workerTask = Task.Run(() => ProcessQueueAsync(botName, _cts.Token));
}
/// <summary>
/// Enqueues one event for the background webhook poster; never blocks. A
/// rejected write (e.g. after the channel is completed) is reported as
/// Dropped.
/// </summary>
public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
{
    if (!IsEnabled || !IsLogLevelEnabled(logEvent))
    {
        return Task.FromResult(WriteResult.LevelFiltered);
    }
    if (_channel.Writer.TryWrite(logEvent))
    {
        Interlocked.Increment(ref BlastedCount);
        return Task.FromResult(WriteResult.Success);
    }
    Interlocked.Increment(ref DroppedCount);
    return Task.FromResult(WriteResult.Dropped);
}
/// <summary>
/// Background loop: drains the channel into batches of up to DefaultBatchSize
/// and posts each batch to the webhook. Exits when the token is cancelled;
/// errors are reported to stderr and end the loop.
/// </summary>
private async Task ProcessQueueAsync(string botName, CancellationToken cancellationToken)
{
    var batch = new List<LogEvent>(DefaultBatchSize);
    try
    {
        while (await _channel.Reader.WaitToReadAsync(cancellationToken))
        {
            while (_channel.Reader.TryRead(out var logEvent))
            {
                batch.Add(logEvent);
                if (batch.Count >= DefaultBatchSize)
                {
                    await SendBatchAsync(botName, batch, cancellationToken);
                    batch.Clear();
                }
            }
            // Queue momentarily empty: post the partial batch now.
            if (batch.Count > 0)
            {
                await SendBatchAsync(botName, batch, cancellationToken);
                batch.Clear();
            }
        }
    }
    catch (OperationCanceledException) { }
    catch (Exception ex)
    {
        // Last-resort guard: a logging pipeline must not crash the process.
        Console.Error.WriteLine($"DiscordFlow error: {ex.Message}");
    }
}
private async Task SendBatchAsync(string botName, List<LogEvent> batch, CancellationToken cancellationToken)
{
foreach (var logEvent in batch)
{
var content = new
{
username = botName,
embeds = new[]
{
new
{
title = logEvent.Level.ToString(),
description = logEvent.Message,
color = GetDiscordColor(logEvent.Level),
timestamp = LogEvent.GetDateTime(logEvent.Timestamp).ToString("O"),
fields = logEvent.Properties.Count > 0
? GetFields(logEvent)
: Array.Empty<object>()
}
}
};
var json = JsonHelper.ToJson(content);
using var stringContent = new StringContent(json, Encoding.UTF8, "application/json");
await _httpClient.PostAsync(_webhookUrl, stringContent, cancellationToken);
if (logEvent.Exception != null)
{
var exContent = new
{
username = botName,
content = $"**Exception:** {logEvent.Exception.GetType().FullName}\n```{logEvent.Exception.Message}\n{logEvent.Exception.StackTrace}```"
};
var exJson = JsonHelper.ToJson(exContent);
using var exStringContent = new StringContent(exJson, Encoding.UTF8, "application/json");
await _httpClient.PostAsync(_webhookUrl, exStringContent, cancellationToken);
}
}
}
private static int GetDiscordColor(LogLevel level)
{
return level switch
{
LogLevel.Trace => 0x00FFFF, // Cyan
LogLevel.Debug => 0x00FF00, // Green
LogLevel.Information => 0xFFFFFF, // White
LogLevel.Warning => 0xFFFF00, // Yellow
LogLevel.Error => 0xFF0000, // Red
LogLevel.Critical => 0x800000, // Dark Red
_ => 0x808080, // Gray
};
}
private static object[] GetFields(LogEvent logEvent)
{
var fields = new List<object>();
foreach (var prop in logEvent.Properties)
{
fields.Add(new
{
name = prop.Key,
value = prop.Value?.ToString() ?? "null",
inline = true
});
}
return fields.ToArray();
}
public override async Task FlushAsync(CancellationToken cancellationToken = default)
{
_channel.Writer.Complete();
try
{
await _workerTask.ConfigureAwait(false);
}
catch { }
}
public override async ValueTask DisposeAsync()
{
IsEnabled = false;
_channel.Writer.Complete();
_cts.Cancel();
try
{
await _workerTask.ConfigureAwait(false);
}
catch { }
_httpClient.Dispose();
_cts.Dispose();
await base.DisposeAsync();
}
}
}

View File

@@ -0,0 +1,188 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Elasticsearch logging flow using HTTP bulk API (without NEST)
/// </summary>
public sealed class ElasticSearchFlow : FlowBase, IAsyncDisposable
{
    private const int ChannelCapacity = 4096;
    private const int DefaultBatchSize = 100; // Bulk insert batch size

    private readonly Channel<LogEvent> _channel;
    private readonly Task _workerTask;
    private readonly CancellationTokenSource _cts;
    private readonly HttpClient _httpClient;
    private readonly string _elasticsearchUrl;
    private readonly string _indexName;

    /// <summary>
    /// Creates an Elasticsearch flow that batches events and posts them to the
    /// <c>_bulk</c> endpoint as NDJSON. No NEST dependency.
    /// </summary>
    /// <param name="elasticsearchUrl">Base URL of the cluster; a trailing slash is trimmed.</param>
    /// <param name="indexName">Target index for all documents.</param>
    /// <param name="minimumLevel">Events below this level are filtered out.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="elasticsearchUrl"/> is null.</exception>
    public ElasticSearchFlow(
        string elasticsearchUrl,
        string indexName = "logs",
        LogLevel minimumLevel = LogLevel.Trace)
        : base($"Elasticsearch:{indexName}", minimumLevel)
    {
        _elasticsearchUrl = elasticsearchUrl?.TrimEnd('/') ?? throw new ArgumentNullException(nameof(elasticsearchUrl));
        _indexName = indexName;
        _httpClient = new HttpClient();

        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            // Drop the oldest event when full so logging never blocks callers.
            FullMode = BoundedChannelFullMode.DropOldest,
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _workerTask = Task.Run(() => ProcessQueueAsync(_cts.Token));
    }

    /// <summary>
    /// Enqueues a log event for asynchronous bulk delivery. Never blocks:
    /// returns <see cref="WriteResult.Dropped"/> when the queue is full.
    /// </summary>
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>Background worker: drains the channel into bulk requests.</summary>
    private async Task ProcessQueueAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        try
        {
            while (await _channel.Reader.WaitToReadAsync(cancellationToken))
            {
                while (_channel.Reader.TryRead(out var logEvent))
                {
                    batch.Add(logEvent);
                    if (batch.Count >= DefaultBatchSize)
                    {
                        await SendBulkAsync(batch, cancellationToken);
                        batch.Clear();
                    }
                }
                // Flush the partial batch before waiting for more events.
                if (batch.Count > 0)
                {
                    await SendBulkAsync(batch, cancellationToken);
                    batch.Clear();
                }
            }
            // Safety net: WaitToReadAsync returned false (writer completed);
            // push anything still pending.
            if (batch.Count > 0)
            {
                await SendBulkAsync(batch, cancellationToken);
            }
        }
        catch (OperationCanceledException) { }
        catch (Exception ex)
        {
            // A logging flow must never take the host process down.
            Console.Error.WriteLine($"ElasticSearchFlow error: {ex.Message}");
        }
    }

    /// <summary>
    /// Serializes the batch into the bulk NDJSON format — an action line
    /// (<c>{"index":{"_index":...}}</c>) followed by the document line — and
    /// posts it to <c>{url}/_bulk</c>.
    /// </summary>
    private async Task SendBulkAsync(List<LogEvent> batch, CancellationToken cancellationToken)
    {
        var sb = new StringBuilder();
        foreach (var logEvent in batch)
        {
            // Action metadata
            sb.AppendLine(JsonHelper.ToJson(new { index = new { _index = _indexName } }));
            // Document
            var doc = new Dictionary<string, object?>
            {
                ["timestamp"] = LogEvent.GetDateTime(logEvent.Timestamp).ToString("O"),
                ["level"] = logEvent.Level.ToString(),
                ["category"] = logEvent.Category ?? string.Empty,
                ["message"] = logEvent.Message.ToString(),
                ["threadId"] = logEvent.ThreadId
            };
            if (logEvent.Exception != null)
            {
                doc["exception"] = new
                {
                    type = logEvent.Exception.GetType().FullName,
                    message = logEvent.Exception.Message,
                    stackTrace = logEvent.Exception.StackTrace
                };
            }
            if (logEvent.Properties.Count > 0)
            {
                var props = new Dictionary<string, object?>();
                foreach (var prop in logEvent.Properties)
                {
                    props[prop.Key] = prop.Value;
                }
                doc["properties"] = props;
            }
            sb.AppendLine(JsonHelper.ToJson(doc));
        }

        // using: the request content was previously leaked (never disposed).
        using var content = new StringContent(sb.ToString(), Encoding.UTF8, "application/x-ndjson");
        using var response = await _httpClient.PostAsync($"{_elasticsearchUrl}/_bulk", content, cancellationToken);
        if (!response.IsSuccessStatusCode)
        {
            var respText = await response.Content.ReadAsStringAsync();
            Console.Error.WriteLine($"ElasticSearchFlow bulk insert failed: {response.StatusCode} {respText}");
        }
    }

    /// <summary>
    /// Drains the queue and waits for the worker to finish.
    /// NOTE: this permanently completes the channel — no further events are
    /// accepted after a flush (they are reported as Dropped).
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        // TryComplete (not Complete): Complete() throws if the writer was already
        // completed, which made FlushAsync-then-DisposeAsync throw.
        _channel.Writer.TryComplete();
        try
        {
            await _workerTask.ConfigureAwait(false);
        }
        catch { }
    }

    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        // TryComplete is idempotent, so disposing after FlushAsync is safe.
        _channel.Writer.TryComplete();
        _cts.Cancel();
        try
        {
            await _workerTask.ConfigureAwait(false);
        }
        catch { }
        _httpClient.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,331 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.EonaCatLogStackCore;
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Mail;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Sends log events as email via SMTP.
///
/// Includes built-in digest batching: instead of one email per event, events are
/// accumulated for up to <see cref="DigestInterval"/> and sent as a single digest.
/// A "flush-on-critical" option bypasses batching for Critical events.
/// </summary>
public sealed class EmailFlow : FlowBase
{
    private readonly string _headerName = "<h2>EonaCat Logger Log Digest</h2>";
    private readonly string _smtpHost;
    private readonly int _smtpPort;
    private readonly bool _useSsl;
    private readonly string _username;
    private readonly string _password;
    private readonly string _from;
    private readonly string[] _to;
    private readonly string _subjectPrefix;
    private readonly TimeSpan _digestInterval;
    private readonly bool _flushOnCritical;
    private readonly int _maxEventsPerDigest;
    private readonly List<LogEvent> _pending = new List<LogEvent>();
    private readonly object _lock = new object();
    private DateTime _lastSent = DateTime.UtcNow;
    private readonly CancellationTokenSource _cts = new CancellationTokenSource();
    private readonly Thread _digestThread;
    private long _totalEmails;

    /// <summary>
    /// Creates an SMTP digest flow. Events are buffered and emailed either when
    /// the digest interval elapses, when the buffer reaches
    /// <paramref name="maxEventsPerDigest"/>, or immediately on a Critical event
    /// (when <paramref name="flushOnCritical"/> is set).
    /// </summary>
    /// <param name="smtpHost">SMTP server host. Required.</param>
    /// <param name="to">Comma- or semicolon-separated recipient list. Required.</param>
    /// <param name="headerName">Optional HTML fragment used as the digest header.</param>
    /// <exception cref="ArgumentNullException">When <paramref name="smtpHost"/> or <paramref name="to"/> is null.</exception>
    public EmailFlow(
        string smtpHost,
        int smtpPort = 587,
        bool useSsl = true,
        string username = null,
        string password = null,
        string from = null,
        string to = null,
        string subjectPrefix = "[EonaCatLogStack]",
        int digestMinutes = 5,
        bool flushOnCritical = true,
        int maxEventsPerDigest = 100,
        string headerName = null,
        LogLevel minimumLevel = LogLevel.Error)
        : base("Email:" + smtpHost, minimumLevel)
    {
        if (smtpHost == null)
        {
            throw new ArgumentNullException(nameof(smtpHost));
        }
        if (to == null)
        {
            throw new ArgumentNullException(nameof(to));
        }
        // Fixed: the condition was inverted — it used to overwrite the default
        // header with null/blank and ignore a caller-supplied header.
        if (!string.IsNullOrWhiteSpace(headerName))
        {
            _headerName = headerName;
        }
        _smtpHost = smtpHost;
        _smtpPort = smtpPort;
        _useSsl = useSsl;
        _username = username;
        _password = password;
        _from = from ?? ("eonacat-logger@" + smtpHost);
        _to = to.Split(new char[] { ',', ';' },
            StringSplitOptions.RemoveEmptyEntries);
        _subjectPrefix = subjectPrefix ?? "[EonaCatLogStack]";
        // Clamp to sane minimums so a bad config cannot spin the digest loop.
        _digestInterval = TimeSpan.FromMinutes(digestMinutes < 1 ? 1 : digestMinutes);
        _flushOnCritical = flushOnCritical;
        _maxEventsPerDigest = maxEventsPerDigest < 1 ? 1 : maxEventsPerDigest;

        _digestThread = new Thread(DigestLoop)
        {
            IsBackground = true,
            Name = "EmailFlow.Digest",
            Priority = ThreadPriority.BelowNormal
        };
        _digestThread.Start();
    }

    /// <summary>Total number of digest emails successfully handed to SMTP.</summary>
    public long TotalEmailsSent { get { return Interlocked.Read(ref _totalEmails); } }

    /// <summary>
    /// Buffers the event for the next digest; triggers an immediate digest when
    /// the buffer is full or (optionally) on a Critical event.
    /// </summary>
    public override Task<WriteResult> BlastAsync(
        LogEvent logEvent,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        bool sendNow = false;
        lock (_lock)
        {
            _pending.Add(logEvent);
            if (_flushOnCritical && logEvent.Level >= LogLevel.Critical)
            {
                sendNow = true;
            }
            if (_pending.Count >= _maxEventsPerDigest)
            {
                sendNow = true;
            }
        }
        if (sendNow)
        {
            SendDigestAsync();
        }
        Interlocked.Increment(ref BlastedCount);
        return Task.FromResult(WriteResult.Success);
    }

    public override Task<WriteResult> BlastBatchAsync(
        ReadOnlyMemory<LogEvent> logEvents,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled)
        {
            return Task.FromResult(WriteResult.FlowDisabled);
        }
        foreach (LogEvent e in logEvents.ToArray())
        {
            if (IsLogLevelEnabled(e))
            {
                // BlastAsync completes synchronously (Task.FromResult), so the
                // unawaited call cannot lose work here.
                BlastAsync(e, cancellationToken);
            }
        }
        return Task.FromResult(WriteResult.Success);
    }

    /// <summary>Schedules a digest for everything currently pending.</summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        SendDigestAsync();
        return Task.FromResult(0);
    }

    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _cts.Cancel();
        _digestThread.Join(TimeSpan.FromSeconds(5));
        // Best-effort final digest for any events still pending.
        SendDigestAsync();
        _cts.Dispose();
        await base.DisposeAsync().ConfigureAwait(false);
    }

    /// <summary>
    /// Background loop: every 30 s, sends a digest if events are pending and the
    /// digest interval has elapsed.
    /// </summary>
    private void DigestLoop()
    {
        while (!_cts.Token.IsCancellationRequested)
        {
            try { Thread.Sleep(TimeSpan.FromSeconds(30)); }
            catch (ThreadInterruptedException) { break; }
            lock (_lock)
            {
                if (_pending.Count > 0 && DateTime.UtcNow - _lastSent >= _digestInterval)
                {
                    // Monitor locks are reentrant, so SendDigestAsync taking
                    // _lock again is safe here.
                    SendDigestAsync();
                }
            }
        }
    }

    /// <summary>
    /// Snapshots and clears the pending buffer, then sends the digest on a
    /// thread-pool thread (fire-and-forget).
    /// </summary>
    private void SendDigestAsync()
    {
        List<LogEvent> batch;
        lock (_lock)
        {
            if (_pending.Count == 0)
            {
                return;
            }
            batch = new List<LogEvent>(_pending);
            _pending.Clear();
            _lastSent = DateTime.UtcNow;
        }
        // Fire-and-forget on a thread pool thread
        ThreadPool.QueueUserWorkItem(_ => SendDigest(batch));
    }

    /// <summary>Builds and sends one digest email; failures are logged and counted as drops.</summary>
    private void SendDigest(List<LogEvent> events)
    {
        try
        {
            string subject = BuildSubject(events);
            string body = BuildBody(events);
            using (SmtpClient smtp = new SmtpClient(_smtpHost, _smtpPort))
            {
                smtp.EnableSsl = _useSsl;
                if (!string.IsNullOrEmpty(_username))
                {
                    smtp.Credentials = new NetworkCredential(_username, _password);
                }
                using (MailMessage msg = new MailMessage())
                {
                    msg.From = new MailAddress(_from);
                    msg.Subject = subject;
                    msg.Body = body;
                    msg.IsBodyHtml = true;
                    foreach (string addr in _to)
                    {
                        msg.To.Add(addr.Trim());
                    }
                    smtp.Send(msg);
                }
            }
            Interlocked.Increment(ref _totalEmails);
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine("[EmailFlow] Send error: " + ex.Message);
            Interlocked.Increment(ref DroppedCount);
        }
    }

    /// <summary>Subject includes the highest level present, the count and a UTC stamp.</summary>
    private string BuildSubject(List<LogEvent> events)
    {
        LogLevel maxLevel = LogLevel.Trace;
        foreach (LogEvent e in events)
        {
            if (e.Level > maxLevel)
            {
                maxLevel = e.Level;
            }
        }
        return _subjectPrefix + " " + LevelString(maxLevel) +
               " " + events.Count + " event(s) @ " +
               DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss") + " UTC";
    }

    /// <summary>Renders the digest as one HTML table row per event.</summary>
    private string BuildBody(List<LogEvent> events)
    {
        var sb = new StringBuilder(events.Count * 300);
        sb.AppendLine("<html><body style='font-family:monospace;font-size:13px'>");
        sb.AppendLine(_headerName);
        sb.AppendLine("<table border='1' cellpadding='4' cellspacing='0' style='border-collapse:collapse;width:100%'>");
        sb.AppendLine("<tr style='background:#333;color:white'>" +
                      "<th>Time</th><th>Level</th><th>Category</th>" +
                      "<th>Message</th><th>Exception</th></tr>");
        foreach (LogEvent e in events)
        {
            string color = LevelColor(e.Level);
            string ts = LogEvent.GetDateTime(e.Timestamp).ToString("HH:mm:ss.fff");
            string msg = HtmlEncode(e.Message.Length > 0 ? e.Message.ToString() : string.Empty);
            string exc = e.Exception != null
                ? HtmlEncode(e.Exception.GetType().Name + ": " + e.Exception.Message)
                : string.Empty;
            sb.AppendFormat(
                "<tr style='background:{0}'><td>{1}</td><td><b>{2}</b></td><td>{3}</td><td>{4}</td><td>{5}</td></tr>",
                color, ts, LevelString(e.Level),
                HtmlEncode(e.Category ?? string.Empty), msg, exc);
            sb.AppendLine();
        }
        sb.AppendLine("</table></body></html>");
        return sb.ToString();
    }

    /// <summary>Row background color per level (pale yellow/red shades for warnings and errors).</summary>
    private static string LevelColor(LogLevel level)
    {
        switch (level)
        {
            case LogLevel.Warning: return "#FFF3CD";
            case LogLevel.Error: return "#F8D7DA";
            case LogLevel.Critical: return "#F1AEB5";
            default: return "#FFFFFF";
        }
    }

    /// <summary>Short upper-case level label for subject/body.</summary>
    private static string LevelString(LogLevel level)
    {
        switch (level)
        {
            case LogLevel.Trace: return "TRACE";
            case LogLevel.Debug: return "DEBUG";
            case LogLevel.Information: return "INFO";
            case LogLevel.Warning: return "WARN";
            case LogLevel.Error: return "ERROR";
            case LogLevel.Critical: return "CRITICAL";
            default: return level.ToString().ToUpperInvariant();
        }
    }

    /// <summary>Minimal HTML escaping for text placed inside the table cells.</summary>
    private static string HtmlEncode(string s)
    {
        if (string.IsNullOrEmpty(s))
        {
            return string.Empty;
        }
        return s.Replace("&", "&amp;")
                .Replace("<", "&lt;")
                .Replace(">", "&gt;")
                .Replace("\"", "&quot;");
    }
}
}

View File

@@ -0,0 +1,499 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.EonaCatLogStackCore;
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Writes log events to AES-256-CBC encrypted, append-only binary files.
///
/// File layout:
/// [4 bytes magic "EONA"] [32 bytes salt] [16 bytes IV]
/// repeated: [4 bytes LE block-length] [N bytes ciphertext]
///
/// Key derivation: PBKDF2-HMACSHA1 with 100 000 iterations.
/// Each individual line is encrypted independently (ECB-safe CBC block) so the
/// file can be read entry-by-entry via <see cref="DecryptToFile"/>.
///
/// </summary>
public sealed class EncryptedFileFlow : FlowBase
{
    private static readonly byte[] Magic = new byte[] { 0x45, 0x4F, 0x4E, 0x41 }; // "EONA"
    private const int SaltSize = 32;
    private const int IvSize = 16;
    private const int KeySize = 32; // AES-256
    private const int Pbkdf2Iter = 100000;

    private readonly BlockingCollection<string> _queue;
    private readonly CancellationTokenSource _cts = new CancellationTokenSource();
    private readonly Thread _writerThread;
    private readonly Thread _flushThread;
    private readonly string _directory;
    private readonly string _filePrefix;
    private readonly string _password;
    private readonly long _maxFileSize;
    private readonly int _flushIntervalMs;
    private readonly TimestampMode _timestampMode;
    private readonly object _lock = new object();

    private FileStream _currentStream;
    private ICryptoTransform _encryptor;
    private string _currentPath;
    private long _currentSize;
    private DateTime _currentDate;
    private long _totalWritten;
    private long _totalRotations;

    /// <summary>
    /// Creates a flow that appends AES-256-CBC encrypted records to daily .eona
    /// files. A dedicated writer thread drains the queue; a second thread flushes
    /// the stream on an interval. Files rotate per day and at
    /// <paramref name="maxFileSize"/>.
    /// </summary>
    /// <param name="directory">Output directory; "./" prefixes resolve relative to the app base dir.</param>
    /// <param name="password">Password used to derive the AES key (PBKDF2).</param>
    /// <exception cref="ArgumentNullException">When directory, password or filePrefix is null.</exception>
    public EncryptedFileFlow(
        string directory,
        string password,
        string filePrefix = "encrypted_log",
        long maxFileSize = 50L * 1024 * 1024,
        int flushIntervalMs = 3000,
        LogLevel minimumLevel = LogLevel.Trace,
        TimestampMode tsMode = TimestampMode.Utc)
        : base("EncryptedFile:" + directory, minimumLevel)
    {
        if (directory == null)
        {
            throw new ArgumentNullException(nameof(directory));
        }
        if (password == null)
        {
            throw new ArgumentNullException(nameof(password));
        }
        if (filePrefix == null)
        {
            throw new ArgumentNullException(nameof(filePrefix));
        }
        _directory = directory;
        _password = password;
        _filePrefix = filePrefix;
        _maxFileSize = maxFileSize;
        _flushIntervalMs = flushIntervalMs;
        _timestampMode = tsMode;

        // Resolve relative path
        if (_directory.StartsWith("./", StringComparison.Ordinal))
        {
            _directory = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, _directory.Substring(2));
        }
        Directory.CreateDirectory(_directory);

        _queue = new BlockingCollection<string>(new ConcurrentQueue<string>(), 8192);
        _writerThread = new Thread(WriterLoop)
        {
            IsBackground = true,
            Name = "EncryptedFileFlow.Writer",
            Priority = ThreadPriority.AboveNormal
        };
        _writerThread.Start();
        _flushThread = new Thread(FlushLoop)
        {
            IsBackground = true,
            Name = "EncryptedFileFlow.Flush",
            Priority = ThreadPriority.BelowNormal
        };
        _flushThread.Start();
    }

    /// <summary>
    /// Decrypts an .eona file produced by this flow to a plain-text file.
    /// Returns true when all records were decrypted successfully.
    /// </summary>
    /// <param name="encryptedPath">Path to the .eona input file.</param>
    /// <param name="outputPath">Path of the plain-text output file (overwritten).</param>
    /// <param name="password">Password the file was written with.</param>
    public static bool DecryptToFile(string encryptedPath, string outputPath, string password)
    {
        if (encryptedPath == null)
        {
            throw new ArgumentNullException(nameof(encryptedPath));
        }
        if (outputPath == null)
        {
            throw new ArgumentNullException(nameof(outputPath));
        }
        if (password == null)
        {
            throw new ArgumentNullException(nameof(password));
        }
        try
        {
            using (FileStream source = File.OpenRead(encryptedPath))
            {
                byte[] magic = new byte[4];
                ReadExact(source, magic, 4);
                for (int i = 0; i < 4; i++)
                {
                    if (magic[i] != Magic[i])
                    {
                        throw new InvalidDataException("Not a valid EONA encrypted log file.");
                    }
                }
                byte[] salt = new byte[SaltSize];
                byte[] iv = new byte[IvSize];
                ReadExact(source, salt, SaltSize);
                ReadExact(source, iv, IvSize);
                byte[] key = DeriveKey(password, salt);
                using (Aes aes = Aes.Create())
                {
                    aes.KeySize = 256;
                    aes.Mode = CipherMode.CBC;
                    aes.Padding = PaddingMode.PKCS7;
                    aes.Key = key;
                    aes.IV = iv;
                    using (ICryptoTransform dec = aes.CreateDecryptor())
                    using (StreamWriter out_ = new StreamWriter(outputPath, false, Encoding.UTF8))
                    {
                        byte[] buffer = new byte[4];
                        while (source.Position < source.Length)
                        {
                            int read = source.Read(buffer, 0, 4);
                            if (read < 4)
                            {
                                break;
                            }
                            int blockLength = BitConverter.ToInt32(buffer, 0);
                            if (blockLength <= 0 || blockLength > 16 * 1024 * 1024)
                            {
                                throw new InvalidDataException("Corrupt block at offset " + (source.Position - 4));
                            }
                            byte[] cipher = new byte[blockLength];
                            ReadExact(source, cipher, blockLength);
                            // Each record is encrypted independently, so the same
                            // transform can be reused per block.
                            byte[] plain = dec.TransformFinalBlock(cipher, 0, cipher.Length);
                            out_.WriteLine(Encoding.UTF8.GetString(plain));
                        }
                        // Fixed: 'return true' used to sit INSIDE the loop, so only
                        // the first record was ever decrypted.
                        return true;
                    }
                }
            }
        }
        catch (Exception e)
        {
            Console.WriteLine($"Exception during decryption => {e.Message}");
        }
        return false;
    }

    /// <summary>Snapshot of written / dropped / rotation counters.</summary>
    public LogStats GetStats()
    {
        return new LogStats(
            Interlocked.Read(ref _totalWritten),
            Interlocked.Read(ref DroppedCount),
            Interlocked.Read(ref _totalRotations), 0, 0);
    }

    /// <summary>Formats the event and enqueues it for the writer thread. Never blocks.</summary>
    public override Task<WriteResult> BlastAsync(
        LogEvent logEvent,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        return Task.FromResult(TryEnqueue(Format(logEvent)));
    }

    public override Task<WriteResult> BlastBatchAsync(
        ReadOnlyMemory<LogEvent> logEvents,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled)
        {
            return Task.FromResult(WriteResult.FlowDisabled);
        }
        WriteResult result = WriteResult.Success;
        foreach (LogEvent e in logEvents.ToArray())
        {
            if (e.Level < MinimumLevel)
            {
                continue;
            }
            if (TryEnqueue(Format(e)) == WriteResult.Dropped)
            {
                // Report Dropped if ANY event in the batch was dropped.
                result = WriteResult.Dropped;
            }
        }
        return Task.FromResult(result);
    }

    /// <summary>Forces a flush of the current file stream to disk.</summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        lock (_lock)
        {
            if (_currentStream != null)
            {
                _currentStream.Flush(true);
            }
        }
        return Task.FromResult(0);
    }

    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _queue.CompleteAdding();
        _cts.Cancel();
        _writerThread.Join(TimeSpan.FromSeconds(5));
        _flushThread.Join(TimeSpan.FromSeconds(2));
        lock (_lock) { CloseCurrentFile(); }
        _cts.Dispose();
        _queue.Dispose();
        await base.DisposeAsync().ConfigureAwait(false);
    }

    /// <summary>
    /// Writer thread: takes lines from the queue (blocking) and writes them
    /// encrypted, opportunistically batching up to 256 extra pending lines.
    /// On exit, drains whatever remains and closes the file.
    /// </summary>
    private void WriterLoop()
    {
        try
        {
            while (!_queue.IsCompleted)
            {
                string line;
                try { line = _queue.Take(_cts.Token); }
                catch (OperationCanceledException) { break; }
                catch (InvalidOperationException) { break; }
                WriteEncrypted(line);
                string extra;
                int batch = 0;
                while (batch < 256 && _queue.TryTake(out extra))
                {
                    WriteEncrypted(extra);
                    batch++;
                }
            }
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine("[EncryptedFileFlow] Writer error: " + ex.Message);
        }
        finally
        {
            // Best-effort drain so queued events are not lost on shutdown.
            string remaining;
            while (_queue.TryTake(out remaining))
            {
                WriteEncrypted(remaining);
            }
            lock (_lock) { CloseCurrentFile(); }
        }
    }

    /// <summary>Flush thread: periodically forces OS-level flush of the stream.</summary>
    private void FlushLoop()
    {
        while (!_cts.Token.IsCancellationRequested)
        {
            try { Thread.Sleep(_flushIntervalMs); }
            catch (ThreadInterruptedException) { break; }
            lock (_lock)
            {
                if (_currentStream != null)
                {
                    try { _currentStream.Flush(true); } catch { }
                }
            }
        }
    }

    /// <summary>
    /// Encrypts one line as an independent record ([4-byte LE length][ciphertext])
    /// and appends it, rotating the file first when the day or size limit changed.
    /// </summary>
    private void WriteEncrypted(string line)
    {
        lock (_lock)
        {
            DateTime today = _timestampMode == TimestampMode.Local
                ? DateTime.Now.Date
                : DateTime.UtcNow.Date;
            if (_currentStream == null || _currentDate != today || _currentSize > _maxFileSize)
            {
                if (_currentStream != null)
                {
                    Interlocked.Increment(ref _totalRotations);
                }
                CloseCurrentFile();
                OpenNewFile(today);
            }
            byte[] plain = Encoding.UTF8.GetBytes(line);
            // TransformFinalBlock resets the transform, so each record stands alone.
            byte[] cipher = _encryptor.TransformFinalBlock(plain, 0, plain.Length);
            byte[] lenBuf = BitConverter.GetBytes(cipher.Length);
            _currentStream.Write(lenBuf, 0, 4);
            _currentStream.Write(cipher, 0, cipher.Length);
            _currentSize += 4 + cipher.Length;
            Interlocked.Increment(ref _totalWritten);
            Interlocked.Increment(ref BlastedCount);
        }
    }

    /// <summary>
    /// Opens (or re-opens) the file for <paramref name="date"/>. A new file gets
    /// a fresh salt/IV header; an existing file's header is re-read so the same
    /// key/IV session continues across restarts.
    /// </summary>
    private void OpenNewFile(DateTime date)
    {
        _currentDate = date;
        _currentPath = Path.Combine(
            _directory,
            _filePrefix + "_" + Environment.MachineName + "_" + date.ToString("yyyyMMdd") + ".eona");
        bool isNew = !File.Exists(_currentPath) || new FileInfo(_currentPath).Length == 0;
        _currentStream = new FileStream(
            _currentPath, FileMode.Append, FileAccess.Write, FileShare.Read, 65536);
        byte[] salt = new byte[SaltSize];
        byte[] iv = new byte[IvSize];
        if (isNew)
        {
            // RandomNumberGenerator.Create() replaces the obsolete
            // RNGCryptoServiceProvider; behavior is identical.
            using (RandomNumberGenerator rng = RandomNumberGenerator.Create())
            {
                rng.GetBytes(salt);
                rng.GetBytes(iv);
            }
            _currentStream.Write(Magic, 0, 4);
            _currentStream.Write(salt, 0, SaltSize);
            _currentStream.Write(iv, 0, IvSize);
            _currentSize = 4 + SaltSize + IvSize;
        }
        else
        {
            // Re-read header so we continue the same key/IV session
            using (FileStream hdr = File.OpenRead(_currentPath))
            {
                hdr.Seek(4, SeekOrigin.Begin);
                ReadExact(hdr, salt, SaltSize);
                ReadExact(hdr, iv, IvSize);
            }
            _currentSize = new FileInfo(_currentPath).Length;
        }
        byte[] key = DeriveKey(_password, salt);
        Aes aes = Aes.Create();
        aes.KeySize = 256;
        aes.Mode = CipherMode.CBC;
        aes.Padding = PaddingMode.PKCS7;
        aes.Key = key;
        aes.IV = iv;
        _encryptor = aes.CreateEncryptor();
    }

    /// <summary>Disposes the encryptor and flushes/closes the current stream (best effort).</summary>
    private void CloseCurrentFile()
    {
        if (_encryptor != null)
        {
            try { _encryptor.Dispose(); } catch { }
            _encryptor = null;
        }
        if (_currentStream != null)
        {
            try { _currentStream.Flush(true); _currentStream.Dispose(); } catch { }
            _currentStream = null;
        }
    }

    /// <summary>Non-blocking enqueue; counts a drop when the bounded queue is full.</summary>
    private WriteResult TryEnqueue(string line)
    {
        if (_queue.TryAdd(line))
        {
            return WriteResult.Success;
        }
        Interlocked.Increment(ref DroppedCount);
        return WriteResult.Dropped;
    }

    /// <summary>Formats an event as a single human-readable text line.</summary>
    private string Format(LogEvent log)
    {
        DateTime ts = LogEvent.GetDateTime(log.Timestamp);
        var sb = new StringBuilder(256);
        sb.Append(ts.ToString("yyyy-MM-dd HH:mm:ss.fff"));
        sb.Append(" [").Append(LevelString(log.Level)).Append("] ");
        sb.Append(log.Category ?? string.Empty);
        sb.Append(": ");
        sb.Append(log.Message.Length > 0 ? log.Message.ToString() : string.Empty);
        if (log.Exception != null)
        {
            sb.Append(" | EX: ").Append(log.Exception.GetType().Name)
              .Append(": ").Append(log.Exception.Message);
        }
        if (log.Properties.Count > 0)
        {
            sb.Append(" |");
            foreach (var kv in log.Properties.ToArray())
            {
                sb.Append(' ').Append(kv.Key).Append('=')
                  .Append(kv.Value != null ? kv.Value.ToString() : "null");
            }
        }
        return sb.ToString();
    }

    /// <summary>Short upper-case level label.</summary>
    private static string LevelString(LogLevel level)
    {
        switch (level)
        {
            case LogLevel.Trace: return "TRACE";
            case LogLevel.Debug: return "DEBUG";
            case LogLevel.Information: return "INFO";
            case LogLevel.Warning: return "WARN";
            case LogLevel.Error: return "ERROR";
            case LogLevel.Critical: return "CRITICAL";
            default: return level.ToString().ToUpperInvariant();
        }
    }

    /// <summary>PBKDF2 key derivation (default HMAC-SHA1, 100 000 iterations, 32-byte key).</summary>
    private static byte[] DeriveKey(string password, byte[] salt)
    {
        using (Rfc2898DeriveBytes kdf = new Rfc2898DeriveBytes(password, salt, Pbkdf2Iter))
        {
            return kdf.GetBytes(KeySize);
        }
    }

    /// <summary>Reads exactly <paramref name="count"/> bytes or throws on EOF.</summary>
    private static void ReadExact(Stream stream, byte[] buf, int count)
    {
        int offset = 0;
        while (offset < count)
        {
            int r = stream.Read(buf, offset, count - offset);
            if (r == 0)
            {
                throw new EndOfStreamException("Unexpected end of encrypted log stream.");
            }
            offset += r;
        }
    }
}
}

View File

@@ -0,0 +1,214 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.IO;
using System.Net.Security;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
namespace EonaCat.LogStack.Flows
{
public sealed class EventLogFlow : FlowBase
{
    private readonly string _destination;
    private readonly int _port;
    private TcpClient? _tcpClient;
    private NetworkStream? _stream;
    private SslStream? _sslStream;
    private readonly bool _useTls;
    private readonly RemoteCertificateValidationCallback? _certificateValidationCallback;
    private readonly X509CertificateCollection? _clientCertificates;
    private readonly List<LogEvent> _logBuffer;
    // Guards _logBuffer: Log() callers and the background flush task race on it.
    private readonly object _bufferLock = new object();
    private readonly int _bufferSize;
    private readonly TimeSpan _flushInterval;
    private readonly CancellationTokenSource _cts;

    /// <summary>
    /// Creates a flow that forwards formatted log lines over TCP (optionally TLS)
    /// to a remote collector, with periodic and size-based buffer flushing.
    /// </summary>
    /// <param name="destination">Remote host name or IP. Required.</param>
    /// <param name="port">Remote TCP port (514 by default, the traditional syslog port).</param>
    /// <param name="flushInterval">Interval between background flushes; defaults to 5 s.</param>
    /// <exception cref="ArgumentNullException">When <paramref name="destination"/> is null.</exception>
    public EventLogFlow(
        string destination,
        int port = 514,
        LogLevel minimumLevel = LogLevel.Trace,
        int bufferSize = 100,
        TimeSpan? flushInterval = null,
        bool useTls = false,
        RemoteCertificateValidationCallback? certificateValidationCallback = null,
        X509CertificateCollection? clientCertificates = null
    ) : base($"EventLogFlow:{destination}:{port}", minimumLevel)
    {
        _destination = destination ?? throw new ArgumentNullException(nameof(destination));
        _port = port;
        _useTls = useTls;
        _certificateValidationCallback = certificateValidationCallback;
        _clientCertificates = clientCertificates;
        _bufferSize = bufferSize;
        _flushInterval = flushInterval ?? TimeSpan.FromSeconds(5);
        _logBuffer = new List<LogEvent>(bufferSize);
        _cts = new CancellationTokenSource();
        _tcpClient = new TcpClient();
        // Fire-and-forget background flusher; stopped via _cts on dispose.
        _ = StartFlushingLogsAsync(_cts.Token);
    }

    /// <summary>
    /// Convenience API: builds a LogEvent from raw values and buffers it.
    /// Triggers an asynchronous flush when the buffer reaches capacity.
    /// </summary>
    public void Log(string message, string category = "CustomEvent", LogLevel level = LogLevel.Information, object customData = null)
    {
        var logEvent = new LogEvent
        {
            Timestamp = DateTime.UtcNow.Ticks,
            Level = level,
            // Message accepts a char[]; presumably backed by ReadOnlyMemory<char> — confirm in LogEvent.
            Message = message.ToCharArray(),
            Category = category,
            CustomData = customData != null ? JsonHelper.ToJson(customData) : string.Empty
        };

        bool flushNeeded;
        lock (_bufferLock)
        {
            _logBuffer.Add(logEvent);
            flushNeeded = _logBuffer.Count >= _bufferSize;
        }
        if (flushNeeded)
        {
            _ = FlushLogsAsync();
        }
    }

    /// <summary>Periodic flusher; exits quietly when cancelled.</summary>
    private async Task StartFlushingLogsAsync(CancellationToken cancellationToken)
    {
        try
        {
            while (!cancellationToken.IsCancellationRequested)
            {
                await Task.Delay(_flushInterval, cancellationToken);
                await FlushLogsAsync();
            }
        }
        catch (OperationCanceledException)
        {
            // Normal shutdown path (Task.Delay cancelled by DisposeAsync).
        }
    }

    /// <summary>Atomically snapshots and clears the buffer, then ships the snapshot.</summary>
    private async Task FlushLogsAsync()
    {
        List<LogEvent> logsToSend;
        lock (_bufferLock)
        {
            if (_logBuffer.Count == 0)
            {
                return;
            }
            logsToSend = new List<LogEvent>(_logBuffer);
            _logBuffer.Clear();
        }
        await SendLogsAsync(logsToSend);
    }

    /// <summary>Formats and writes the events over the (TLS or plain) stream; errors are logged, not thrown.</summary>
    private async Task SendLogsAsync(IEnumerable<LogEvent> logEvents)
    {
        try
        {
            await EnsureConnectedAsync();
            var logMessages = new StringBuilder();
            foreach (var logEvent in logEvents)
            {
                logMessages.AppendLine(FormatLogMessage(logEvent));
            }
            var data = Encoding.UTF8.GetBytes(logMessages.ToString());
            if (_useTls && _sslStream != null)
            {
                await _sslStream.WriteAsync(data, 0, data.Length);
                await _sslStream.FlushAsync();
            }
            else if (_stream != null)
            {
                await _stream.WriteAsync(data, 0, data.Length);
                await _stream.FlushAsync();
            }
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Error sending logs: {ex.Message}");
        }
    }

    /// <summary>
    /// Connects (or reconnects) to the destination. A TcpClient cannot be reused
    /// after disconnection, so a fresh client is created when needed; the old code
    /// also NRE'd on `await _tcpClient?.ConnectAsync(...)` when the client was null.
    /// </summary>
    private async Task EnsureConnectedAsync()
    {
        if (_tcpClient != null && _tcpClient.Connected)
        {
            return;
        }
        _sslStream?.Dispose();
        _stream?.Dispose();
        _tcpClient?.Dispose();
        _tcpClient = new TcpClient();
        await _tcpClient.ConnectAsync(_destination, _port);
        _stream = _tcpClient.GetStream();
        if (_useTls)
        {
            _sslStream = new SslStream(_stream, false, _certificateValidationCallback);
            await _sslStream.AuthenticateAsClientAsync(_destination, _clientCertificates, System.Security.Authentication.SslProtocols.Tls12, false);
        }
    }

    /// <summary>Renders one event as "timestamp [LEVEL] category: message | CustomData: ...".</summary>
    private string FormatLogMessage(LogEvent logEvent)
    {
        var dt = LogEvent.GetDateTime(logEvent.Timestamp);
        var sb = new StringBuilder();
        sb.Append(dt.ToString("yyyy-MM-dd HH:mm:ss.fff"));
        sb.Append(" [");
        sb.Append(logEvent.Level.ToString().ToUpperInvariant());
        sb.Append("] ");
        sb.Append(logEvent.Category);
        sb.Append(": ");
        sb.Append(logEvent.Message);
        if (!string.IsNullOrEmpty(logEvent.CustomData))
        {
            sb.Append(" | CustomData: ");
            sb.Append(logEvent.CustomData);
        }
        return sb.ToString();
    }

    /// <summary>Sends a single event immediately (bypasses the buffer).</summary>
    public override async Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return WriteResult.LevelFiltered;
        }
        await SendLogsAsync(new List<LogEvent> { logEvent });
        return WriteResult.Success;
    }

    public override async Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled)
        {
            return WriteResult.LevelFiltered;
        }
        await SendLogsAsync(logEvents.Span.ToArray());
        return WriteResult.Success;
    }

    public override async ValueTask DisposeAsync()
    {
        _cts.Cancel();
        _sslStream?.Dispose();
        _stream?.Dispose();
        _tcpClient?.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }

    /// <summary>Ships everything currently buffered.</summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default)
    {
        // Previously a no-op, which silently left buffered events behind.
        return FlushLogsAsync();
    }
}
}

View File

@@ -0,0 +1,50 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.Flows;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Wraps two flows: events go to the primary, and fall back to the secondary
/// whenever the primary does not return Success (or throws).
/// </summary>
public sealed class FailoverFlow : FlowBase
{
    private readonly IFlow _primary;
    private readonly IFlow _secondary;

    /// <exception cref="ArgumentNullException">When either flow is null.</exception>
    public FailoverFlow(IFlow primary, IFlow secondary)
        : base($"Failover({primary.Name})", primary.MinimumLevel)
    {
        _primary = primary ?? throw new ArgumentNullException(nameof(primary));
        _secondary = secondary ?? throw new ArgumentNullException(nameof(secondary));
    }

    public override async Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        WriteResult result;
        try
        {
            result = await _primary.BlastAsync(logEvent, cancellationToken).ConfigureAwait(false);
        }
        catch
        {
            // A throwing primary previously propagated to the caller and the
            // secondary was never tried — the whole point of failover. Treat a
            // thrown exception as a failed write and fall through.
            result = WriteResult.Dropped;
        }
        if (result == WriteResult.Success)
        {
            return result;
        }
        return await _secondary.BlastAsync(logEvent, cancellationToken).ConfigureAwait(false);
    }

    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        await _primary.FlushAsync(cancellationToken);
        await _secondary.FlushAsync(cancellationToken);
    }

    public override async ValueTask DisposeAsync()
    {
        await _primary.DisposeAsync();
        await _secondary.DisposeAsync();
    }
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,285 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Net.Sockets;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Ships log events to a Graylog server as GELF 1.1 messages over UDP
/// (default) or TCP. Events are queued on a bounded channel and transmitted
/// by a single background task so callers never block on the network.
/// </summary>
public sealed class GraylogFlow : FlowBase
{
    private const int DefaultBatchSize = 256;
    private const int ChannelCapacity = 4096;
    private const int MaxUdpPacketSize = 8192;
    private readonly Channel<LogEvent> _channel;
    private readonly Task _senderTask;
    private readonly CancellationTokenSource _cts;
    private readonly string _host;
    private readonly int _port;
    private readonly bool _useTcp;
    private TcpClient? _tcpClient;
    private NetworkStream? _tcpStream;
    private UdpClient? _udpClient;
    private readonly BackpressureStrategy _backpressureStrategy;
    private readonly string _graylogHostName;

    /// <param name="host">Graylog server host name or IP address.</param>
    /// <param name="port">GELF input port (default 12201).</param>
    /// <param name="useTcp">True for GELF TCP, false (default) for GELF UDP.</param>
    /// <param name="graylogHostName">Value of the GELF "host" field; defaults to the machine name.</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    /// <param name="backpressureStrategy">Behavior when the internal queue is full.</param>
    public GraylogFlow(
        string host,
        int port = 12201,
        bool useTcp = false,
        string? graylogHostName = null,
        LogLevel minimumLevel = LogLevel.Trace,
        BackpressureStrategy backpressureStrategy = BackpressureStrategy.DropOldest)
        : base($"Graylog:{host}:{port}", minimumLevel)
    {
        _host = host ?? throw new ArgumentNullException(nameof(host));
        _port = port;
        _useTcp = useTcp;
        _backpressureStrategy = backpressureStrategy;
        _graylogHostName = graylogHostName ?? Environment.MachineName;
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            FullMode = backpressureStrategy switch
            {
                BackpressureStrategy.Wait => BoundedChannelFullMode.Wait,
                BackpressureStrategy.DropNewest => BoundedChannelFullMode.DropWrite,
                BackpressureStrategy.DropOldest => BoundedChannelFullMode.DropOldest,
                _ => BoundedChannelFullMode.Wait
            },
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        if (!_useTcp)
        {
            // UDP socket can be created eagerly; TCP connects lazily in the sender loop.
            _udpClient = new UdpClient();
        }
        _senderTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
    }

    /// <summary>Queues a single event; returns Dropped when the channel rejects it.</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>
    /// Queues a batch of events. Level-filtered events are silently skipped;
    /// the result is Dropped if any event could not be queued.
    /// </summary>
    public override async Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled)
        {
            return WriteResult.FlowDisabled;
        }
        var result = WriteResult.Success;
        foreach (var logEvent in logEvents.ToArray())
        {
            if (!IsLogLevelEnabled(logEvent))
            {
                continue;
            }
            if (_channel.Writer.TryWrite(logEvent))
            {
                Interlocked.Increment(ref BlastedCount);
            }
            else
            {
                Interlocked.Increment(ref DroppedCount);
                result = WriteResult.Dropped;
            }
        }
        return result;
    }

    /// <summary>
    /// Background loop: drains the channel into batches and ships them.
    /// On any error it disposes the TCP client and retries after a delay.
    /// </summary>
    private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                if (_useTcp)
                {
                    await EnsureTcpConnectedAsync(cancellationToken);
                }
                await foreach (var logEvent in _channel.Reader.ReadAllAsync(cancellationToken))
                {
                    batch.Add(logEvent);
                    // Flush when the batch is full or the queue is momentarily empty
                    // (keeps latency low under light load).
                    if (batch.Count >= DefaultBatchSize || _channel.Reader.Count == 0)
                    {
                        await SendBatchAsync(batch, cancellationToken);
                        batch.Clear();
                    }
                }
                // ReadAllAsync only completes after the writer is completed
                // (flush/dispose): drain the remainder and stop. The original
                // re-entered the outer loop here and spun until cancellation.
                if (batch.Count > 0)
                {
                    await SendBatchAsync(batch, cancellationToken);
                    batch.Clear();
                }
                return;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine($"GraylogFlow error: {ex.Message}");
                await Task.Delay(1000, cancellationToken);
                _tcpClient?.Dispose();
                _tcpClient = null;
            }
        }
    }

    /// <summary>Connects the TCP client lazily, replacing a dead connection.</summary>
    private async Task EnsureTcpConnectedAsync(CancellationToken cancellationToken)
    {
        if (_tcpClient != null && _tcpClient.Connected)
        {
            return;
        }
        _tcpClient?.Dispose();
        _tcpClient = new TcpClient();
        await _tcpClient.ConnectAsync(_host, _port);
        _tcpStream = _tcpClient.GetStream();
    }

    /// <summary>Converts a DateTime to Unix epoch seconds (GELF "timestamp" field).</summary>
    private static double ToUnixTimeSeconds(DateTime dt)
    {
        // Make sure the DateTime is UTC
        var utc = dt.ToUniversalTime();
        var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
        return (utc - epoch).TotalSeconds;
    }

    /// <summary>Serializes each event to GELF JSON and sends it over the active transport.</summary>
    private async Task SendBatchAsync(List<LogEvent> batch, CancellationToken cancellationToken)
    {
        foreach (var logEvent in batch)
        {
            var dt = LogEvent.GetDateTime(logEvent.Timestamp);
            var unixTimestamp = ToUnixTimeSeconds(dt);
            var gelfMessage = new
            {
                version = "1.1",
                host = _graylogHostName,
                short_message = logEvent.Message,
                timestamp = unixTimestamp,
                level = MapLogLevelToSyslogSeverity(logEvent.Level),
                _category = logEvent.Category
            };
            string json = JsonHelper.ToJson(gelfMessage);
            byte[] data = Encoding.UTF8.GetBytes(json);
            if (_useTcp)
            {
                if (_tcpStream != null)
                {
                    await _tcpStream.WriteAsync(data, 0, data.Length, cancellationToken);
                    // GELF over TCP uses a NUL byte as the message delimiter;
                    // without it Graylog cannot tell where one message ends
                    // (the original sent bare JSON with no frame separator).
                    _tcpStream.WriteByte(0x00);
                    await _tcpStream.FlushAsync(cancellationToken);
                }
            }
            else
            {
                if (_udpClient != null)
                {
                    if (data.Length <= MaxUdpPacketSize)
                    {
                        await _udpClient.SendAsync(data, data.Length, _host, _port);
                    }
                    else
                    {
                        await SendUdpInChunksAsync(data, MaxUdpPacketSize, cancellationToken);
                    }
                }
            }
        }
    }

    /// <summary>
    /// Splits an oversized UDP message using the GELF chunked encoding:
    /// each datagram starts with magic bytes 0x1E 0x0F, an 8-byte message id,
    /// a sequence number, and the total chunk count. The original split the
    /// raw bytes with no header, which Graylog cannot reassemble.
    /// </summary>
    private async Task SendUdpInChunksAsync(byte[] data, int chunkSize, CancellationToken cancellationToken)
    {
        const int HeaderSize = 12;
        int payloadPerChunk = chunkSize - HeaderSize;
        int totalChunks = (data.Length + payloadPerChunk - 1) / payloadPerChunk;
        if (totalChunks > 128)
        {
            // GELF allows at most 128 chunks per message; drop rather than send garbage.
            Interlocked.Increment(ref DroppedCount);
            return;
        }
        // Unique message id so the server can correlate the chunks (first 8 bytes of a GUID).
        byte[] messageId = Guid.NewGuid().ToByteArray();
        byte[] buffer = ArrayPool<byte>.Shared.Rent(chunkSize);
        try
        {
            int offset = 0;
            for (int sequence = 0; sequence < totalChunks; sequence++)
            {
                int size = Math.Min(payloadPerChunk, data.Length - offset);
                buffer[0] = 0x1E; // GELF chunked magic bytes
                buffer[1] = 0x0F;
                Buffer.BlockCopy(messageId, 0, buffer, 2, 8);
                buffer[10] = (byte)sequence;
                buffer[11] = (byte)totalChunks;
                Buffer.BlockCopy(data, offset, buffer, HeaderSize, size);
                if (_udpClient != null)
                {
                    await _udpClient.SendAsync(buffer, HeaderSize + size, _host, _port);
                }
                offset += size;
            }
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }

    /// <summary>Maps the library's log level onto syslog severity (GELF "level" field).</summary>
    private static int MapLogLevelToSyslogSeverity(LogLevel level)
    {
        return level switch
        {
            LogLevel.Trace => 7,
            LogLevel.Debug => 7,
            LogLevel.Information => 6,
            LogLevel.Warning => 4,
            LogLevel.Error => 3,
            LogLevel.Critical => 2,
            _ => 6
        };
    }

    /// <summary>
    /// Completes the channel and waits for the sender to drain it.
    /// Note: the flow accepts no further events after a flush.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        _channel.Writer.Complete();
        try { await _senderTask.ConfigureAwait(false); } catch { }
    }

    /// <summary>Stops the sender and releases all sockets and streams.</summary>
    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _channel.Writer.Complete();
        _cts.Cancel();
        try { await _senderTask.ConfigureAwait(false); } catch { }
        _tcpStream?.Dispose();
        _tcpClient?.Dispose();
        _udpClient?.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,283 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows;
/// <summary>
/// HTTP flow for sending logs to remote endpoints with batching and retry logic.
/// Events are queued on a bounded channel (oldest dropped when full) and
/// posted as JSON arrays by a background task.
/// </summary>
public sealed class HttpFlow : FlowBase
{
    // This file is part of the EonaCat project(s) which is released under the Apache License.
    // See the LICENSE file or go to https://EonaCat.com/License for full license details.
    private const int ChannelCapacity = 2048;
    private const int DefaultBatchSize = 50;
    private const int MaxRetries = 3;
    private readonly Channel<LogEvent> _channel;
    private readonly Task _writerTask;
    private readonly CancellationTokenSource _cts;
    private readonly HttpClient _httpClient;
    private readonly string _endpoint;
    private readonly bool _ownHttpClient;        // true when we created (and must dispose) the client
    private readonly TimeSpan _batchInterval;
    private readonly Dictionary<string, string>? _headers;

    /// <param name="endpoint">URL to POST log batches to.</param>
    /// <param name="httpClient">Optional caller-owned client; a private one is created when null.</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    /// <param name="batchInterval">Idle wait between send cycles; defaults to 5 seconds.</param>
    /// <param name="headers">Optional extra request headers (e.g. auth tokens).</param>
    public HttpFlow(
        string endpoint,
        HttpClient? httpClient = null,
        LogLevel minimumLevel = LogLevel.Information,
        TimeSpan? batchInterval = null,
        Dictionary<string, string>? headers = null)
        : base($"Http:{endpoint}", minimumLevel)
    {
        _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint));
        _batchInterval = batchInterval ?? TimeSpan.FromSeconds(5);
        _headers = headers;
        if (httpClient == null)
        {
            _httpClient = new HttpClient
            {
                Timeout = TimeSpan.FromSeconds(30)
            };
            _ownHttpClient = true;
        }
        else
        {
            _httpClient = httpClient;
            _ownHttpClient = false;
        }
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            FullMode = BoundedChannelFullMode.DropOldest,
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _writerTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
    }

    /// <summary>Queues a single event; returns Dropped when the channel rejects it.</summary>
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>
    /// Completes the channel and waits for the background sender to drain it.
    /// Note: the flow accepts no further events after a flush.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        _channel.Writer.Complete();
        try
        {
            await _writerTask.ConfigureAwait(false);
        }
        catch (OperationCanceledException)
        {
            // Expected
        }
    }

    /// <summary>Background loop: collects events into batches and posts them.</summary>
    private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        try
        {
            while (!cancellationToken.IsCancellationRequested)
            {
                var hasMore = true;
                // Collect batch
                while (batch.Count < DefaultBatchSize && hasMore)
                {
                    if (_channel.Reader.TryRead(out var logEvent))
                    {
                        batch.Add(logEvent);
                    }
                    else
                    {
                        hasMore = false;
                    }
                }
                // Send batch if we have events
                if (batch.Count > 0)
                {
                    await SendBatchWithRetryAsync(batch, cancellationToken).ConfigureAwait(false);
                    batch.Clear();
                }
                // Wait for either new events or batch interval
                if (_channel.Reader.Count == 0)
                {
                    try
                    {
                        await Task.Delay(_batchInterval, cancellationToken).ConfigureAwait(false);
                    }
                    catch (OperationCanceledException)
                    {
                        // Expected if cancellation was requested during delay
                        break;
                    }
                }
            }
        }
        catch (OperationCanceledException)
        {
            // Expected when shutting down
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"HttpFlow error: {ex.Message}");
        }
    }

    /// <summary>
    /// Posts one batch as JSON, retrying up to <see cref="MaxRetries"/> times
    /// with exponential backoff; the batch is counted as dropped on final failure.
    /// </summary>
    private async Task SendBatchWithRetryAsync(List<LogEvent> batch, CancellationToken cancellationToken)
    {
        var payload = SerializeBatch(batch);
        // Serialize payload to JSON string
        var jsonPayload = JsonHelper.ToJson(payload);
        for (int retry = 0; retry < MaxRetries; retry++)
        {
            try
            {
                using (var content = new StringContent(jsonPayload, Encoding.UTF8, "application/json"))
                using (var request = new HttpRequestMessage(HttpMethod.Post, _endpoint) { Content = content })
                {
                    if (_headers != null)
                    {
                        foreach (var header in _headers)
                        {
                            request.Headers.TryAddWithoutValidation(header.Key, header.Value);
                        }
                    }
                    // Dispose the response: the original leaked HttpResponseMessage
                    // on every attempt, holding connections/buffers alive.
                    using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
                    if (response.IsSuccessStatusCode)
                    {
                        return; // Success
                    }
                    // Last retry: mark as dropped
                    if (retry == MaxRetries - 1)
                    {
                        Interlocked.Add(ref DroppedCount, batch.Count);
                    }
                }
            }
            catch
            {
                if (retry == MaxRetries - 1)
                {
                    Interlocked.Add(ref DroppedCount, batch.Count);
                }
            }
            // Exponential backoff
            if (retry < MaxRetries - 1)
            {
                await Task.Delay(TimeSpan.FromMilliseconds(100 * Math.Pow(2, retry)), cancellationToken).ConfigureAwait(false);
            }
        }
    }

    /// <summary>Converts a batch to plain dictionaries ready for JSON serialization.</summary>
    private object[] SerializeBatch(List<LogEvent> batch)
    {
        var payload = new object[batch.Count];
        for (int i = 0; i < batch.Count; i++)
        {
            var logEvent = batch[i];
            var dto = new Dictionary<string, object?>
            {
                ["timestamp"] = LogEvent.GetDateTime(logEvent.Timestamp).ToString("O"),
                ["level"] = logEvent.Level.ToString(),
                ["message"] = logEvent.Message.ToString(),
                ["category"] = logEvent.Category,
                ["threadId"] = logEvent.ThreadId
            };
            if (logEvent.TraceId != default)
            {
                dto["traceId"] = logEvent.TraceId.ToString();
            }
            if (logEvent.SpanId != default)
            {
                dto["spanId"] = logEvent.SpanId.ToString();
            }
            if (logEvent.Exception != null)
            {
                dto["exception"] = new
                {
                    type = logEvent.Exception.GetType().FullName,
                    message = logEvent.Exception.Message,
                    stackTrace = logEvent.Exception.StackTrace
                };
            }
            if (logEvent.Properties.Count > 0)
            {
                var props = new Dictionary<string, object?>();
                foreach (var prop in logEvent.Properties)
                {
                    props[prop.Key] = prop.Value;
                }
                dto["properties"] = props;
            }
            payload[i] = dto;
        }
        return payload;
    }

    /// <summary>Stops the sender and disposes the HttpClient when this flow owns it.</summary>
    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _channel.Writer.Complete();
        _cts.Cancel();
        try
        {
            await _writerTask.ConfigureAwait(false);
        }
        catch { }
        if (_ownHttpClient)
        {
            _httpClient.Dispose();
        }
        _cts.Dispose();
        await base.DisposeAsync();
    }
}

View File

@@ -0,0 +1,180 @@
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// In-memory flow backed by a fixed-size ring buffer, intended for
/// diagnostics and tests. Memory use is bounded: once the buffer is full,
/// each new event overwrites the oldest one.
/// </summary>
public sealed class MemoryFlow : FlowBase
{
    private readonly LogEvent[] _ring;
    private readonly int _capacity;
    private int _readIndex;   // index of the oldest retained event
    private int _writeIndex;  // next slot to write
    private int _size;        // number of events currently retained
    private readonly object _gate = new();

    /// <param name="capacity">Maximum number of events retained.</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    /// <exception cref="ArgumentOutOfRangeException">Capacity is not positive.</exception>
    public MemoryFlow(
        int capacity = 10000,
        LogLevel minimumLevel = LogLevel.Trace)
        : base("Memory", minimumLevel)
    {
        if (capacity <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(capacity));
        }
        _capacity = capacity;
        _ring = new LogEvent[capacity];
    }

    /// <summary>
    /// Stores the event in the ring buffer. When full, the oldest event is
    /// displaced and counted as dropped; the write itself always succeeds.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        lock (_gate)
        {
            _ring[_writeIndex] = logEvent;
            _writeIndex = (_writeIndex + 1) % _capacity;
            if (_size < _capacity)
            {
                _size++;
            }
            else
            {
                // Full: the slot we just wrote displaced the oldest event.
                _readIndex = (_readIndex + 1) % _capacity;
                Interlocked.Increment(ref DroppedCount);
            }
        }
        Interlocked.Increment(ref BlastedCount);
        return Task.FromResult(WriteResult.Success);
    }

    /// <summary>Nothing to flush — events live only in memory.</summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default) => Task.CompletedTask;

    /// <summary>
    /// Returns a snapshot of all buffered events, oldest first.
    /// </summary>
    public LogEvent[] GetEvents()
    {
        lock (_gate)
        {
            var snapshot = new LogEvent[_size];
            for (int offset = 0; offset < _size; offset++)
            {
                snapshot[offset] = _ring[(_readIndex + offset) % _capacity];
            }
            return snapshot;
        }
    }

    /// <summary>
    /// Returns the buffered events whose level equals <paramref name="level"/>, oldest first.
    /// </summary>
    public LogEvent[] GetEvents(LogLevel level)
    {
        lock (_gate)
        {
            var matches = new List<LogEvent>(_size);
            for (int offset = 0; offset < _size; offset++)
            {
                var candidate = _ring[(_readIndex + offset) % _capacity];
                if (candidate.Level == level)
                {
                    matches.Add(candidate);
                }
            }
            return matches.ToArray();
        }
    }

    /// <summary>
    /// Returns up to <paramref name="count"/> of the most recent events, oldest of those first.
    /// </summary>
    public LogEvent[] GetRecentEvents(int count)
    {
        lock (_gate)
        {
            var take = Math.Min(count, _size);
            var recent = new LogEvent[take];
            for (int offset = 0; offset < take; offset++)
            {
                // Walk backwards from the write position; +_capacity keeps the modulo positive.
                recent[offset] = _ring[(_writeIndex - take + offset + _capacity) % _capacity];
            }
            return recent;
        }
    }

    /// <summary>
    /// Removes all events and resets the buffer to its initial state.
    /// </summary>
    public void Clear()
    {
        lock (_gate)
        {
            Array.Clear(_ring, 0, _ring.Length);
            _readIndex = 0;
            _writeIndex = 0;
            _size = 0;
        }
    }

    /// <summary>
    /// Current number of events held in the buffer.
    /// </summary>
    public int Count
    {
        get
        {
            lock (_gate)
            {
                return _size;
            }
        }
    }

    /// <summary>
    /// True when the buffer has reached capacity and is overwriting old events.
    /// </summary>
    public bool IsFull
    {
        get
        {
            lock (_gate)
            {
                return _size == _capacity;
            }
        }
    }
}

View File

@@ -0,0 +1,178 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Logging flow that sends messages to Microsoft Teams via an incoming webhook.
/// Events are queued on a bounded channel (oldest dropped when full) and
/// posted as Markdown "text" payloads by a background task.
/// </summary>
public sealed class MicrosoftTeamsFlow : FlowBase, IAsyncDisposable
{
    private const int ChannelCapacity = 4096;
    private const int DefaultBatchSize = 5; // Keep batches small to avoid throttling
    private readonly Channel<LogEvent> _channel;
    private readonly Task _workerTask;
    private readonly CancellationTokenSource _cts;
    private readonly HttpClient _httpClient;
    private readonly string _webhookUrl;

    /// <param name="webhookUrl">Teams incoming-webhook URL.</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    /// <exception cref="ArgumentNullException"><paramref name="webhookUrl"/> is null.</exception>
    public MicrosoftTeamsFlow(
        string webhookUrl,
        LogLevel minimumLevel = LogLevel.Information)
        : base("MicrosoftTeams", minimumLevel)
    {
        _webhookUrl = webhookUrl ?? throw new ArgumentNullException(nameof(webhookUrl));
        _httpClient = new HttpClient();
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            FullMode = BoundedChannelFullMode.DropOldest,
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _workerTask = Task.Run(() => ProcessQueueAsync(_cts.Token));
    }

    /// <summary>Queues a single event; returns Dropped when the channel rejects it.</summary>
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>Background loop: drains the channel in small batches and posts them.</summary>
    private async Task ProcessQueueAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        try
        {
            while (await _channel.Reader.WaitToReadAsync(cancellationToken))
            {
                while (_channel.Reader.TryRead(out var logEvent))
                {
                    batch.Add(logEvent);
                    if (batch.Count >= DefaultBatchSize)
                    {
                        await SendBatchAsync(batch, cancellationToken);
                        batch.Clear();
                    }
                }
                // Flush the partial batch once the queue is momentarily empty.
                if (batch.Count > 0)
                {
                    await SendBatchAsync(batch, cancellationToken);
                    batch.Clear();
                }
            }
            if (batch.Count > 0)
            {
                await SendBatchAsync(batch, cancellationToken);
            }
        }
        catch (OperationCanceledException) { }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"MicrosoftTeamsFlow error: {ex.Message}");
        }
    }

    /// <summary>Posts each event of the batch to the webhook as its own message.</summary>
    private async Task SendBatchAsync(List<LogEvent> batch, CancellationToken cancellationToken)
    {
        foreach (var logEvent in batch)
        {
            var payload = new
            {
                // Teams expects a "text" field with Markdown or simple message
                text = BuildMessage(logEvent)
            };
            var json = JsonHelper.ToJson(payload);
            using var content = new StringContent(json, Encoding.UTF8, "application/json");
            // Dispose the response: the original leaked HttpResponseMessage
            // on every post, holding connections/buffers alive.
            using var response = await _httpClient.PostAsync(_webhookUrl, content, cancellationToken);
        }
    }

    /// <summary>Formats one event as a Markdown block for the Teams message body.</summary>
    private static string BuildMessage(LogEvent logEvent)
    {
        var sb = new StringBuilder();
        sb.AppendLine($"**Level:** {logEvent.Level}");
        if (!string.IsNullOrWhiteSpace(logEvent.Category))
        {
            sb.AppendLine($"**Category:** {logEvent.Category}");
        }
        sb.AppendLine($"**Timestamp:** {LogEvent.GetDateTime(logEvent.Timestamp):yyyy-MM-dd HH:mm:ss.fff}");
        sb.AppendLine($"**Message:** {logEvent.Message}");
        if (logEvent.Exception != null)
        {
            sb.AppendLine("**Exception:**");
            sb.AppendLine($"```\n{logEvent.Exception.GetType().FullName}: {logEvent.Exception.Message}\n{logEvent.Exception.StackTrace}\n```");
        }
        if (logEvent.Properties.Count > 0)
        {
            sb.AppendLine("**Properties:**");
            foreach (var prop in logEvent.Properties)
            {
                sb.AppendLine($"`{prop.Key}` = `{prop.Value?.ToString() ?? "null"}`");
            }
        }
        return sb.ToString();
    }

    /// <summary>
    /// Completes the channel and waits for the worker to drain it.
    /// Note: the flow accepts no further events after a flush.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        _channel.Writer.Complete();
        try
        {
            await _workerTask.ConfigureAwait(false);
        }
        catch { }
    }

    /// <summary>Stops the worker and disposes the HttpClient and token source.</summary>
    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _channel.Writer.Complete();
        _cts.Cancel();
        try
        {
            await _workerTask.ConfigureAwait(false);
        }
        catch { }
        _httpClient.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,403 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.EonaCatLogStackCore;
using System;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Linq;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Publishes log events to a Redis channel using the PUBLISH command (Pub/Sub)
/// and optionally appends them to a Redis List (LPUSH) for persistence.
///
/// Uses raw TCP + RESP protocol, so there arent additional dependencies
///
/// Features:
/// - Reconnect with exponential back-off on connection failure
/// - Optional LPUSH to a list key with LTRIM to cap list length
/// - Optional password authentication (AUTH command)
/// - Optional DB selection (SELECT command)
/// - Background writer thread (non-blocking callers)
/// </summary>
public sealed class RedisFlow : FlowBase
{
    private readonly string _host;
    private readonly int _port;
    private readonly string _password;   // null/empty = no AUTH command is sent
    private readonly int _database;      // 0 = default DB, no SELECT is sent
    private readonly string _channel; // PUBLISH channel (Pub/Sub)
    private readonly string _listKey; // LPUSH list key (null = disabled)
    private readonly int _maxListLength; // LTRIM cap (0 = unlimited)
    private readonly BlockingCollection<string> _queue;
    private readonly CancellationTokenSource _cts = new CancellationTokenSource();
    private readonly Thread _writerThread;
    private TcpClient _tcp;
    private NetworkStream _stream;
    private readonly object _connLock = new object(); // guards _tcp/_stream (connect/disconnect)
    private long _totalPublished;

    /// <summary>
    /// Creates the flow and immediately starts the dedicated background
    /// writer thread that drains the queue into Redis.
    /// </summary>
    /// <param name="host">Redis server host name or address.</param>
    /// <param name="port">Redis server port.</param>
    /// <param name="password">Optional password for the AUTH command.</param>
    /// <param name="database">Database index; SELECT is issued when non-zero.</param>
    /// <param name="channel">Pub/Sub channel every event is PUBLISHed to.</param>
    /// <param name="listKey">Optional list key for LPUSH persistence; null disables it.</param>
    /// <param name="maxListLength">LTRIM cap for the list; 0 = unlimited.</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    /// <exception cref="ArgumentNullException">host or channel is null.</exception>
    public RedisFlow(
        string host = "localhost",
        int port = 6379,
        string password = null,
        int database = 0,
        string channel = "eonacat:logs",
        string listKey = null,
        int maxListLength = 10000,
        LogLevel minimumLevel = LogLevel.Trace)
        : base("Redis:" + host + ":" + port, minimumLevel)
    {
        if (host == null)
        {
            throw new ArgumentNullException("host");
        }
        if (channel == null)
        {
            throw new ArgumentNullException("channel");
        }
        _host = host;
        _port = port;
        _password = password;
        _database = database;
        _channel = channel;
        _listKey = listKey;
        _maxListLength = maxListLength;
        // Bounded queue: BlastAsync drops (rather than blocks) once 16384 entries are pending.
        _queue = new BlockingCollection<string>(new ConcurrentQueue<string>(), 16384);
        _writerThread = new Thread(WriterLoop)
        {
            IsBackground = true,
            Name = "RedisFlow.Writer",
            Priority = ThreadPriority.AboveNormal
        };
        _writerThread.Start();
    }

    /// <summary>Total number of events written to Redis by the background thread so far.</summary>
    public long TotalPublished { get { return Interlocked.Read(ref _totalPublished); } }

    /// <summary>
    /// Serializes the event to JSON and enqueues it for the writer thread.
    /// Never touches the network itself; returns Dropped when the queue is full.
    /// </summary>
    public override Task<WriteResult> BlastAsync(
        LogEvent logEvent,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        string json = Serialize(logEvent);
        if (_queue.TryAdd(json))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>
    /// Enqueues a batch of events one by one via <see cref="BlastAsync"/>
    /// (which completes synchronously here); events below the minimum level
    /// are skipped. Reports Dropped when any single enqueue failed.
    /// </summary>
    public override Task<WriteResult> BlastBatchAsync(
        ReadOnlyMemory<LogEvent> logEvents,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled)
        {
            return Task.FromResult(WriteResult.FlowDisabled);
        }
        WriteResult result = WriteResult.Success;
        foreach (LogEvent e in logEvents.ToArray())
        {
            if (e.Level < MinimumLevel)
            {
                continue;
            }
            // Safe to block: BlastAsync only does queue work and returns a completed task.
            if (BlastAsync(e, cancellationToken).GetAwaiter().GetResult() == WriteResult.Dropped)
            {
                result = WriteResult.Dropped;
            }
        }
        return Task.FromResult(result);
    }

    /// <summary>
    /// Best-effort flush: polls until the queue is empty or 5 seconds elapse.
    /// Does not guarantee the last item taken by the writer was already sent.
    /// </summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        Stopwatch sw = System.Diagnostics.Stopwatch.StartNew();
        while (_queue.Count > 0 && sw.Elapsed < TimeSpan.FromSeconds(5))
        {
            Thread.Sleep(5);
        }
        return Task.FromResult(0);
    }

    /// <summary>
    /// Stops accepting events, signals the writer thread, waits up to 5 seconds
    /// for it to exit, then tears down the connection and owned resources.
    /// </summary>
    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _queue.CompleteAdding();
        _cts.Cancel();
        _writerThread.Join(TimeSpan.FromSeconds(5));
        Disconnect();
        _cts.Dispose();
        _queue.Dispose();
        await base.DisposeAsync().ConfigureAwait(false);
    }

    /// <summary>
    /// Writer-thread entry point: connects, drains the queue (in small bursts
    /// of up to 64 extra messages), and reconnects with exponential back-off
    /// (500 ms doubling to a 30 s cap) whenever anything throws.
    /// </summary>
    private void WriterLoop()
    {
        int backoff = 500;
        while (!_queue.IsCompleted)
        {
            try
            {
                EnsureConnected();
                backoff = 500; // reset on successful connect
                while (!_queue.IsCompleted)
                {
                    string msg;
                    // Take blocks until an item arrives, the token is cancelled,
                    // or adding is completed - the latter two end the thread.
                    try { msg = _queue.Take(_cts.Token); }
                    catch (OperationCanceledException) { return; }
                    catch (InvalidOperationException) { return; }
                    SendToRedis(msg);
                    string extra;
                    int batch = 0;
                    while (batch < 64 && _queue.TryTake(out extra))
                    {
                        SendToRedis(extra);
                        batch++;
                    }
                }
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine("[RedisFlow] Error: " + ex.Message + ". Reconnecting in " + backoff + "ms.");
                Disconnect();
                try { Thread.Sleep(backoff); } catch (ThreadInterruptedException) { return; }
                backoff = Math.Min(backoff * 2, 30000);
            }
        }
    }

    /// <summary>
    /// Publishes one serialized event and, when a list key is configured,
    /// also persists it via LPUSH (trimmed with LTRIM when capped).
    /// </summary>
    private void SendToRedis(string json)
    {
        // PUBLISH channel message
        SendCommand("PUBLISH", _channel, json);
        // Optional list persistence
        if (!string.IsNullOrEmpty(_listKey))
        {
            SendCommand("LPUSH", _listKey, json);
            if (_maxListLength > 0)
            {
                SendCommand("LTRIM", _listKey, "0", (_maxListLength - 1).ToString());
            }
        }
        Interlocked.Increment(ref _totalPublished);
    }

    /// <summary>
    /// Opens (or re-opens) the TCP connection and performs the optional
    /// AUTH / SELECT handshake. Called only from the writer thread.
    /// </summary>
    private void EnsureConnected()
    {
        lock (_connLock)
        {
            if (_tcp != null && _tcp.Connected)
            {
                return;
            }
            Disconnect();
            _tcp = new TcpClient();
            _tcp.Connect(_host, _port);
            _stream = _tcp.GetStream();
            if (!string.IsNullOrEmpty(_password))
            {
                SendCommandRaw("AUTH", _password);
                ReadResp(); // +OK
            }
            if (_database != 0)
            {
                SendCommandRaw("SELECT", _database.ToString());
                ReadResp(); // +OK
            }
        }
    }

    /// <summary>Closes and clears the socket/stream; safe to call repeatedly.</summary>
    private void Disconnect()
    {
        lock (_connLock)
        {
            if (_stream != null) { try { _stream.Dispose(); } catch { } _stream = null; }
            if (_tcp != null) { try { _tcp.Dispose(); } catch { } _tcp = null; }
        }
    }

    // Build RESP array and write + read response (inline, synchronous)
    private void SendCommand(params string[] args)
    {
        SendCommandRaw(args);
        ReadResp(); // discard but catch errors
    }

    /// <summary>
    /// Writes a RESP array without reading the reply. The array header
    /// ("*&lt;n&gt;") is emitted together with the first bulk-string header on
    /// the first loop iteration (the StringBuilder is flushed and cleared
    /// per argument), then each argument's bytes and CRLF follow.
    /// </summary>
    private void SendCommandRaw(params string[] args)
    {
        var sb = new StringBuilder();
        sb.Append('*').Append(args.Length).Append("\r\n");
        foreach (string arg in args)
        {
            byte[] bytes = Encoding.UTF8.GetBytes(arg);
            sb.Append('$').Append(bytes.Length).Append("\r\n");
            // Append raw bytes via stream after the header
            byte[] header = Encoding.ASCII.GetBytes(sb.ToString());
            sb.Clear();
            _stream.Write(header, 0, header.Length);
            _stream.Write(bytes, 0, bytes.Length);
            _stream.WriteByte(0x0D); // \r
            _stream.WriteByte(0x0A); // \n
        }
    }

    /// <summary>
    /// Consumes exactly one RESP reply line. A "-" reply raises an exception
    /// carrying the server error text; every other reply type is discarded
    /// up to and including its terminating "\n".
    /// NOTE(review): multi-line replies (bulk strings, arrays) are only read
    /// to the first line - sufficient for the commands sent here, which all
    /// return simple or integer replies.
    /// </summary>
    private void ReadResp()
    {
        // Read one RESP line — we only need to consume the response,
        // error checking is minimal (connection drop will be caught upstream)
        int b = _stream.ReadByte();
        if (b == -1)
        {
            throw new Exception("Redis connection closed.");
        }
        if ((char)b == '-') // error line
        {
            StringBuilder err = new StringBuilder();
            int c;
            while ((c = _stream.ReadByte()) != -1 && (char)c != '\r')
            {
                err.Append((char)c);
            }
            _stream.ReadByte(); // consume \n
            throw new Exception("Redis error: " + err);
        }
        // Consume remainder of the line
        while (true)
        {
            int c = _stream.ReadByte();
            if (c == -1 || (char)c == '\n')
            {
                break;
            }
        }
    }

    // ----------------------------------------------------------------- serialization

    /// <summary>
    /// Hand-rolled JSON serialization of an event: ISO-8601 timestamp, level,
    /// machine name, category, message, optional exception text and a flat
    /// string-valued "props" object. All values are JSON-escaped.
    /// </summary>
    private static string Serialize(LogEvent log)
    {
        var sb = new StringBuilder(256);
        sb.Append("{\"ts\":\"");
        sb.Append(LogEvent.GetDateTime(log.Timestamp).ToString("O"));
        sb.Append("\",\"level\":\"");
        sb.Append(LevelString(log.Level));
        sb.Append("\",\"host\":\"");
        JsonEscape(Environment.MachineName, sb);
        sb.Append("\",\"category\":\"");
        JsonEscape(log.Category ?? string.Empty, sb);
        sb.Append("\",\"message\":\"");
        JsonEscape(log.Message.Length > 0 ? log.Message.ToString() : string.Empty, sb);
        sb.Append('"');
        if (log.Exception != null)
        {
            sb.Append(",\"exception\":\"");
            JsonEscape(log.Exception.ToString(), sb);
            sb.Append('"');
        }
        if (log.Properties.Count > 0)
        {
            sb.Append(",\"props\":{");
            bool first = true;
            // ToArray gives a stable snapshot in case the collection changes underneath.
            foreach (var kv in log.Properties.ToArray())
            {
                if (!first)
                {
                    sb.Append(',');
                }
                first = false;
                sb.Append('"');
                JsonEscape(kv.Key, sb);
                sb.Append("\":\"");
                JsonEscape(kv.Value != null ? kv.Value.ToString() : "null", sb);
                sb.Append('"');
            }
            sb.Append('}');
        }
        sb.Append('}');
        return sb.ToString();
    }

    /// <summary>
    /// Appends <paramref name="value"/> to <paramref name="sb"/> with JSON
    /// string escaping (quotes, backslashes, common whitespace escapes, and
    /// \uXXXX for remaining control characters).
    /// </summary>
    private static void JsonEscape(string value, StringBuilder sb)
    {
        if (string.IsNullOrEmpty(value))
        {
            return;
        }
        foreach (char c in value)
        {
            switch (c)
            {
                case '"': sb.Append("\\\""); break;
                case '\\': sb.Append("\\\\"); break;
                case '\n': sb.Append("\\n"); break;
                case '\r': sb.Append("\\r"); break;
                case '\t': sb.Append("\\t"); break;
                default:
                    if (char.IsControl(c)) { sb.Append("\\u"); sb.Append(((int)c).ToString("x4")); }
                    else
                    {
                        sb.Append(c);
                    }
                    break;
            }
        }
    }

    /// <summary>Maps a log level onto the short upper-case name used in the JSON payload.</summary>
    private static string LevelString(LogLevel level)
    {
        switch (level)
        {
            case LogLevel.Trace: return "TRACE";
            case LogLevel.Debug: return "DEBUG";
            case LogLevel.Information: return "INFO";
            case LogLevel.Warning: return "WARN";
            case LogLevel.Error: return "ERROR";
            case LogLevel.Critical: return "CRITICAL";
            default: return level.ToString().ToUpperInvariant();
        }
    }
}
}

View File

@@ -0,0 +1,67 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.Flows;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Decorator flow that retries a failing inner flow, optionally doubling the
/// delay between attempts (exponential backoff), before giving up.
/// </summary>
public sealed class RetryFlow : FlowBase
{
    private readonly IFlow _inner;
    private readonly int _maxRetries;
    private readonly TimeSpan _initialDelay;
    private readonly bool _exponentialBackoff;

    /// <param name="inner">Flow whose writes are retried.</param>
    /// <param name="maxRetries">Maximum number of additional attempts after the first.</param>
    /// <param name="initialDelay">Delay before the first retry; defaults to 200 ms.</param>
    /// <param name="exponentialBackoff">Double the delay after every failed attempt.</param>
    /// <exception cref="ArgumentNullException"><paramref name="inner"/> is null.</exception>
    public RetryFlow(
        IFlow inner,
        int maxRetries = 3,
        TimeSpan? initialDelay = null,
        bool exponentialBackoff = true)
        // Validate inside the base call: the original read inner.MinimumLevel
        // (and inner.Name via the helper) here before its null check, so a null
        // inner raised NullReferenceException instead of ArgumentNullException.
        : base($"Retry({GetFlowName(inner ?? throw new ArgumentNullException(nameof(inner)))})", inner.MinimumLevel)
    {
        _inner = inner;
        _maxRetries = maxRetries;
        _initialDelay = initialDelay ?? TimeSpan.FromMilliseconds(200);
        _exponentialBackoff = exponentialBackoff;
    }

    /// <summary>
    /// Writes the event to the inner flow, retrying (with the configured
    /// delay/backoff) until it reports success or the retry budget is exhausted.
    /// Returns the last result from the inner flow.
    /// </summary>
    public override async Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        int attempt = 0;
        TimeSpan delay = _initialDelay;
        while (true)
        {
            var result = await _inner.BlastAsync(logEvent, cancellationToken).ConfigureAwait(false);
            if (result == WriteResult.Success || attempt >= _maxRetries)
            {
                return result;
            }
            attempt++;
            await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
            if (_exponentialBackoff)
            {
                delay = TimeSpan.FromMilliseconds(delay.TotalMilliseconds * 2);
            }
        }
    }

    public override Task FlushAsync(CancellationToken cancellationToken = default)
        => _inner.FlushAsync(cancellationToken);

    public override ValueTask DisposeAsync()
        => _inner.DisposeAsync();

    // Renamed from '_innerName': private methods use PascalCase in C#.
    private static string GetFlowName(IFlow flow) => flow.Name ?? flow.GetType().Name;
}
}

View File

@@ -0,0 +1,239 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.EonaCatLogStackCore;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// An in-process circular buffer that retains the last N log events in memory.
/// Optionally forwards the buffered context to another flow as soon as an
/// event at or above a configured trigger level arrives.
/// </summary>
public sealed class RollingBufferFlow : FlowBase
{
    private readonly LogEvent[] _ring;        // circular storage, length == _capacity
    private readonly int _capacity;
    private long _head;   // next write position (kept within [0, _capacity))
    private long _count;  // total ever written (not capped)
    private readonly object _ringLock = new object();
    private readonly LogLevel _triggerLevel;
    private readonly IFlow _triggerTarget;
    private readonly int _preContextLines;

    /// <param name="capacity">Maximum number of events to retain.</param>
    /// <param name="minimumLevel">Minimum level to store in the buffer.</param>
    /// <param name="triggerLevel">
    /// When a log event reaches this level or above, the current buffer
    /// contents are immediately forwarded to <paramref name="triggerTarget"/>.
    /// Set to <c>LogLevel.None</c> (or omit) to disable.
    /// </param>
    /// <param name="triggerTarget">
    /// Flow to forward the buffered context to when the trigger fires.
    /// Can be null even when <paramref name="triggerLevel"/> is set.
    /// </param>
    /// <param name="preContextLines">
    /// How many buffered lines to forward before the triggering event.
    /// Defaults to entire buffer (int.MaxValue).
    /// </param>
    public RollingBufferFlow(
        int capacity = 500,
        LogLevel minimumLevel = LogLevel.Trace,
        LogLevel triggerLevel = LogLevel.Error,
        IFlow triggerTarget = null,
        int preContextLines = int.MaxValue)
        : base("RollingBuffer", minimumLevel)
    {
        if (capacity < 1)
        {
            throw new ArgumentOutOfRangeException(nameof(capacity), "Must be >= 1.");
        }
        _capacity = capacity;
        _ring = new LogEvent[capacity];
        _triggerLevel = triggerLevel;
        _triggerTarget = triggerTarget;
        _preContextLines = preContextLines < 0 ? 0 : preContextLines;
    }

    /// <summary>Returns a snapshot of the buffer from oldest to newest.</summary>
    public LogEvent[] GetAll()
    {
        lock (_ringLock)
        {
            long total = Math.Min(_count, _capacity);
            if (total == 0)
            {
                return new LogEvent[0];
            }
            LogEvent[] result = new LogEvent[total];
            // Once the ring has wrapped, the oldest element sits at _head.
            long start = (_count > _capacity) ? _head : 0;
            for (long i = 0; i < total; i++)
            {
                result[i] = _ring[(start + i) % _capacity];
            }
            return result;
        }
    }

    /// <summary>Returns the N most recent events, oldest first.</summary>
    public LogEvent[] GetRecent(int n)
    {
        lock (_ringLock)
        {
            long total = Math.Min(_count, _capacity);
            long take = Math.Min(n, total);
            if (take <= 0)
            {
                return new LogEvent[0];
            }
            LogEvent[] result = new LogEvent[take];
            // "+ _capacity * 2" keeps the dividend non-negative before the modulo.
            long start = (_head - take + _capacity * 2) % _capacity;
            for (long i = 0; i < take; i++)
            {
                result[i] = _ring[(start + i) % _capacity];
            }
            return result;
        }
    }

    /// <summary>Returns all buffered events at or above <paramref name="level"/>.</summary>
    public LogEvent[] GetByLevel(LogLevel level)
    {
        LogEvent[] all = GetAll();
        List<LogEvent> filtered = new List<LogEvent>(all.Length);
        foreach (LogEvent e in all)
        {
            if (e.Level >= level)
            {
                filtered.Add(e);
            }
        }
        return filtered.ToArray();
    }

    /// <summary>Number of events currently held in the buffer.</summary>
    public int Count
    {
        get { lock (_ringLock) { return (int)Math.Min(_count, _capacity); } }
    }

    /// <summary>Total events ever written (may exceed capacity).</summary>
    // Interlocked.Read guarantees a torn-free 64-bit read on 32-bit runtimes.
    public long TotalWritten { get { return Interlocked.Read(ref _count); } }

    /// <summary>Clears all stored events.</summary>
    public void Clear()
    {
        lock (_ringLock)
        {
            Array.Clear(_ring, 0, _capacity);
            _head = 0;
            _count = 0;
        }
    }

    /// <summary>
    /// Stores the event in the ring and, when a trigger target is configured
    /// and the event reaches the trigger level, forwards the buffered context.
    /// </summary>
    public override Task<WriteResult> BlastAsync(
        LogEvent logEvent,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        bool triggered = false;
        lock (_ringLock)
        {
            _ring[_head % _capacity] = logEvent;
            _head = (_head + 1) % _capacity;
            _count++;
            if (_triggerTarget != null
                && logEvent.Level >= _triggerLevel
                && _triggerLevel != LogLevel.None)
            {
                triggered = true;
            }
        }
        Interlocked.Increment(ref BlastedCount);
        // Forward outside the lock so the target flow cannot block writers.
        if (triggered)
        {
            ForwardToTarget(logEvent);
        }
        return Task.FromResult(WriteResult.Success);
    }

    /// <summary>
    /// Stores every event of the batch, awaiting and aggregating per-event
    /// results instead of firing them off unobserved.
    /// </summary>
    public override async Task<WriteResult> BlastBatchAsync(
        ReadOnlyMemory<LogEvent> logEvents,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled)
        {
            return WriteResult.FlowDisabled;
        }
        var result = WriteResult.Success;
        // Index the memory element-by-element instead of copying it with
        // ToArray(); a Span<T> cannot be held across the await below.
        for (int i = 0; i < logEvents.Length; i++)
        {
            var itemResult = await BlastAsync(logEvents.Span[i], cancellationToken).ConfigureAwait(false);
            if (itemResult == WriteResult.Dropped)
            {
                result = WriteResult.Dropped;
            }
        }
        return result;
    }

    /// <summary>Nothing is buffered outside the ring, so flushing is a no-op.</summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
        => Task.CompletedTask;

    /// <summary>Disables the flow and releases buffered events.</summary>
    public override ValueTask DisposeAsync()
    {
        IsEnabled = false;
        Clear();
        return base.DisposeAsync();
    }

    // Synchronously pushes the buffered context window, then the triggering
    // event itself, to the trigger target. Errors are reported to stderr and
    // swallowed so logging can never crash the caller.
    private void ForwardToTarget(LogEvent triggeringEvent)
    {
        if (_triggerTarget == null)
        {
            return;
        }
        try
        {
            // Grab the context window (entire buffer by default).
            int take = _preContextLines == int.MaxValue ? _capacity : _preContextLines;
            LogEvent[] context = GetRecent(take);
            foreach (LogEvent ev in context)
            {
                if (ev.Equals(triggeringEvent))
                {
                    continue; // avoid duplicate; the trigger is forwarded last
                }
                _triggerTarget.BlastAsync(ev).GetAwaiter().GetResult();
            }
            // Always forward the triggering event last
            _triggerTarget.BlastAsync(triggeringEvent).GetAwaiter().GetResult();
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine("[RollingBufferFlow] Trigger forward error: " + ex.Message);
        }
    }
}
}

View File

@@ -0,0 +1,317 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.EonaCatLogStackCore;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Pushes log events to a SignalR hub via HTTP POST to the hub's /send endpoint.
/// Works with ASP.NET SignalR (classic) and ASP.NET Core SignalR server-side REST API.
///
/// A lightweight alternative to the SignalR client library
///
/// On the server side you need a minimal hub endpoint that accepts POST:
/// POST {hubUrl}/send body: { "target": "...", "arguments": [ { log json } ] }
///
/// For live dashboards: the hub broadcasts to a "logs" group; clients subscribe and
/// render events in real time.
/// </summary>
public sealed class SignalRFlow : FlowBase
{
    private readonly string _hubUrl;
    private readonly string _hubMethod;
    private readonly HttpClient _http;
    private readonly bool _ownsHttpClient;     // only dispose clients we created ourselves
    private readonly int _batchSize;
    private readonly TimeSpan _batchInterval;
    private readonly List<string> _pending = new List<string>();  // serialized events awaiting dispatch
    private readonly object _lock = new object();
    private readonly CancellationTokenSource _cts = new CancellationTokenSource();
    private readonly Thread _senderThread;
    private long _totalSent;

    /// <param name="hubUrl">Base hub URL; trailing slashes are trimmed.</param>
    /// <param name="hubMethod">Client method name placed in the envelope's "target" field.</param>
    /// <param name="httpClient">Optional externally-owned client; a private one is created when null.</param>
    /// <param name="batchSize">Events per POST (floored at 1).</param>
    /// <param name="batchIntervalMs">Background flush interval (floored at 50 ms).</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    public SignalRFlow(
        string hubUrl,
        string hubMethod = "ReceiveLog",
        HttpClient httpClient = null,
        int batchSize = 20,
        int batchIntervalMs = 500,
        LogLevel minimumLevel = LogLevel.Information)
        : base("SignalR:" + hubUrl, minimumLevel)
    {
        if (hubUrl == null)
        {
            throw new ArgumentNullException("hubUrl");
        }
        _hubUrl = hubUrl.TrimEnd('/');
        _hubMethod = hubMethod ?? "ReceiveLog";
        _batchSize = batchSize < 1 ? 1 : batchSize;
        _batchInterval = TimeSpan.FromMilliseconds(batchIntervalMs < 50 ? 50 : batchIntervalMs);
        if (httpClient == null)
        {
            _http = new HttpClient();
            _http.Timeout = TimeSpan.FromSeconds(10);
            _ownsHttpClient = true;
        }
        else
        {
            _http = httpClient;
            _ownsHttpClient = false;
        }
        // A dedicated low-priority background thread flushes pending events
        // every _batchInterval even when the batch-size threshold is not hit.
        _senderThread = new Thread(SenderLoop)
        {
            IsBackground = true,
            Name = "SignalRFlow.Sender",
            Priority = ThreadPriority.BelowNormal
        };
        _senderThread.Start();
    }

    /// <summary>Total events successfully delivered to the hub.</summary>
    public long TotalSent { get { return Interlocked.Read(ref _totalSent); } }

    /// <summary>
    /// Serializes the event and enqueues it; a full batch is dispatched
    /// immediately, otherwise the sender thread picks it up.
    /// </summary>
    public override Task<WriteResult> BlastAsync(
        LogEvent logEvent,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        string json = Serialize(logEvent);
        bool sendNow = false;
        lock (_lock)
        {
            _pending.Add(json);
            if (_pending.Count >= _batchSize)
            {
                sendNow = true;
            }
        }
        if (sendNow)
        {
            DispatchBatch();
        }
        Interlocked.Increment(ref BlastedCount);
        return Task.FromResult(WriteResult.Success);
    }

    /// <summary>Enqueues every eligible event of the batch.</summary>
    public override Task<WriteResult> BlastBatchAsync(
        ReadOnlyMemory<LogEvent> logEvents,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled)
        {
            return Task.FromResult(WriteResult.FlowDisabled);
        }
        foreach (LogEvent e in logEvents.ToArray())
        {
            if (IsLogLevelEnabled(e))
            {
                // BlastAsync completes synchronously (Task.FromResult), so the
                // returned task is already finished; the discard is deliberate.
                _ = BlastAsync(e, cancellationToken);
            }
        }
        return Task.FromResult(WriteResult.Success);
    }

    /// <summary>Forces any pending events onto the wire.</summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        DispatchBatch();
        return Task.FromResult(0);
    }

    /// <summary>Stops the sender thread, flushes, and releases owned resources.</summary>
    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _cts.Cancel();
        _senderThread.Join(TimeSpan.FromSeconds(5));
        DispatchBatch();
        if (_ownsHttpClient)
        {
            _http.Dispose();
        }
        _cts.Dispose();
        await base.DisposeAsync().ConfigureAwait(false);
    }

    // Background thread body: wake up every _batchInterval and dispatch.
    private void SenderLoop()
    {
        while (!_cts.Token.IsCancellationRequested)
        {
            try { Thread.Sleep(_batchInterval); }
            catch (ThreadInterruptedException) { break; }
            DispatchBatch();
        }
    }

    // Snapshots and clears the pending list under the lock, then sends the
    // snapshot on the thread pool so callers never block on HTTP.
    private void DispatchBatch()
    {
        List<string> batch;
        lock (_lock)
        {
            if (_pending.Count == 0)
            {
                return;
            }
            batch = new List<string>(_pending);
            _pending.Clear();
        }
        ThreadPool.QueueUserWorkItem(_ =>
        {
            try { SendBatch(batch); }
            catch (Exception ex)
            {
                Console.Error.WriteLine("[SignalRFlow] Send error: " + ex.Message);
                Interlocked.Add(ref DroppedCount, batch.Count);
            }
        });
    }

    // Builds the hub envelope and POSTs it synchronously (we are already on a
    // worker thread here). Both the request content and the response are
    // disposed to release the underlying connection resources.
    private void SendBatch(List<string> jsonEvents)
    {
        // Build envelope: { "target": "ReceiveLog", "arguments": [ [...events...] ] }
        var sb = new StringBuilder(jsonEvents.Count * 200 + 64);
        sb.Append("{\"target\":\"");
        JsonEscape(_hubMethod, sb);
        sb.Append("\",\"arguments\":[[");
        for (int i = 0; i < jsonEvents.Count; i++)
        {
            if (i > 0)
            {
                sb.Append(',');
            }
            sb.Append(jsonEvents[i]);
        }
        sb.Append("]]}");
        string payload = sb.ToString();
        using (StringContent content = new StringContent(payload, Encoding.UTF8, "application/json"))
        using (HttpResponseMessage resp = _http.PostAsync(_hubUrl + "/send", content).GetAwaiter().GetResult())
        {
            if (resp.IsSuccessStatusCode)
            {
                Interlocked.Add(ref _totalSent, jsonEvents.Count);
            }
            else
            {
                Console.Error.WriteLine("[SignalRFlow] HTTP " + (int)resp.StatusCode + " from hub.");
                Interlocked.Add(ref DroppedCount, jsonEvents.Count);
            }
        }
    }

    // Hand-rolled JSON writer for a flat event object; avoids a serializer
    // dependency. Only string values are emitted, all escaped via JsonEscape.
    private static string Serialize(LogEvent log)
    {
        var sb = new StringBuilder(256);
        sb.Append('{');
        sb.Append("\"ts\":\"");
        sb.Append(LogEvent.GetDateTime(log.Timestamp).ToString("O"));
        sb.Append("\",\"level\":\"");
        sb.Append(LevelString(log.Level));
        sb.Append("\",\"category\":\"");
        JsonEscape(log.Category ?? string.Empty, sb);
        sb.Append("\",\"message\":\"");
        JsonEscape(log.Message.Length > 0 ? log.Message.ToString() : string.Empty, sb);
        sb.Append('"');
        if (log.Exception != null)
        {
            sb.Append(",\"exception\":\"");
            JsonEscape(log.Exception.GetType().Name + ": " + log.Exception.Message, sb);
            sb.Append('"');
        }
        if (log.Properties.Count > 0)
        {
            sb.Append(",\"props\":{");
            bool first = true;
            // ToArray snapshots the properties in case the source is mutated
            // concurrently while we enumerate.
            foreach (var kv in log.Properties.ToArray())
            {
                if (!first)
                {
                    sb.Append(',');
                }
                first = false;
                sb.Append('"');
                JsonEscape(kv.Key, sb);
                sb.Append("\":\"");
                JsonEscape(kv.Value != null ? kv.Value.ToString() : "null", sb);
                sb.Append('"');
            }
            sb.Append('}');
        }
        sb.Append('}');
        return sb.ToString();
    }

    // Escapes a string for inclusion inside a JSON string literal.
    private static void JsonEscape(string value, StringBuilder sb)
    {
        if (string.IsNullOrEmpty(value))
        {
            return;
        }
        foreach (char c in value)
        {
            switch (c)
            {
                case '"': sb.Append("\\\""); break;
                case '\\': sb.Append("\\\\"); break;
                case '\n': sb.Append("\\n"); break;
                case '\r': sb.Append("\\r"); break;
                case '\t': sb.Append("\\t"); break;
                default:
                    if (char.IsControl(c))
                    {
                        sb.Append("\\u");
                        sb.Append(((int)c).ToString("x4"));
                    }
                    else
                    {
                        sb.Append(c);
                    }
                    break;
            }
        }
    }

    // Fixed upper-case names for the well-known levels; anything else falls
    // back to the enum's own name.
    private static string LevelString(LogLevel level)
    {
        switch (level)
        {
            case LogLevel.Trace: return "TRACE";
            case LogLevel.Debug: return "DEBUG";
            case LogLevel.Information: return "INFO";
            case LogLevel.Warning: return "WARN";
            case LogLevel.Error: return "ERROR";
            case LogLevel.Critical: return "CRITICAL";
            default: return level.ToString().ToUpperInvariant();
        }
    }
}
}

View File

@@ -0,0 +1,174 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
/// <summary>
/// logging flow that sends messages to a Slack channel via webhook.
/// </summary>
public sealed class SlackFlow : FlowBase, IAsyncDisposable
{
    private const int ChannelCapacity = 4096;
    private const int DefaultBatchSize = 5; // Slack rate-limits, small batches are safer
    private readonly Channel<LogEvent> _channel;
    private readonly Task _workerTask;
    private readonly CancellationTokenSource _cts;
    private readonly HttpClient _httpClient;
    private readonly string _webhookUrl;

    /// <param name="webhookUrl">Slack incoming-webhook URL. Must not be null.</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    public SlackFlow(
        string webhookUrl,
        LogLevel minimumLevel = LogLevel.Information)
        : base("Slack", minimumLevel)
    {
        _webhookUrl = webhookUrl ?? throw new ArgumentNullException(nameof(webhookUrl));
        _httpClient = new HttpClient();
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            // When the queue is full the oldest events are discarded first.
            FullMode = BoundedChannelFullMode.DropOldest,
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _workerTask = Task.Run(() => ProcessQueueAsync(_cts.Token));
    }

    /// <summary>Enqueues the event for asynchronous delivery to Slack.</summary>
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    // Background worker: drains the queue in batches of DefaultBatchSize and
    // flushes any remainder once the channel is momentarily empty.
    private async Task ProcessQueueAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        try
        {
            while (await _channel.Reader.WaitToReadAsync(cancellationToken))
            {
                while (_channel.Reader.TryRead(out var logEvent))
                {
                    batch.Add(logEvent);
                    if (batch.Count >= DefaultBatchSize)
                    {
                        await SendBatchAsync(batch, cancellationToken);
                        batch.Clear();
                    }
                }
                if (batch.Count > 0)
                {
                    await SendBatchAsync(batch, cancellationToken);
                    batch.Clear();
                }
            }
            // Channel completed: deliver whatever is still batched.
            if (batch.Count > 0)
            {
                await SendBatchAsync(batch, cancellationToken);
            }
        }
        catch (OperationCanceledException) { }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"SlackFlow error: {ex.Message}");
        }
    }

    // Slack incoming webhooks accept one message per request, so events are
    // posted sequentially. Delivery is best-effort: the response status is
    // ignored, but the response is disposed to release the connection.
    private async Task SendBatchAsync(List<LogEvent> batch, CancellationToken cancellationToken)
    {
        foreach (var logEvent in batch)
        {
            var payload = new
            {
                text = BuildMessage(logEvent)
            };
            var json = JsonHelper.ToJson(payload);
            using var content = new StringContent(json, Encoding.UTF8, "application/json");
            using var response = await _httpClient.PostAsync(_webhookUrl, content, cancellationToken);
        }
    }

    // Formats a single event as Slack mrkdwn (bold labels, fenced exception).
    private static string BuildMessage(LogEvent logEvent)
    {
        var sb = new StringBuilder();
        sb.AppendLine($"*Level:* {logEvent.Level}");
        if (!string.IsNullOrWhiteSpace(logEvent.Category))
        {
            sb.AppendLine($"*Category:* {logEvent.Category}");
        }
        sb.AppendLine($"*Timestamp:* {LogEvent.GetDateTime(logEvent.Timestamp):yyyy-MM-dd HH:mm:ss.fff}");
        sb.AppendLine($"*Message:* {logEvent.Message}");
        if (logEvent.Exception != null)
        {
            sb.AppendLine("*Exception:*");
            sb.AppendLine($"```{logEvent.Exception.GetType().FullName}: {logEvent.Exception.Message}\n{logEvent.Exception.StackTrace}```");
        }
        if (logEvent.Properties.Count > 0)
        {
            sb.AppendLine("*Properties:*");
            foreach (var prop in logEvent.Properties)
            {
                sb.AppendLine($"`{prop.Key}` = `{prop.Value?.ToString() ?? "null"}`");
            }
        }
        return sb.ToString();
    }

    /// <summary>
    /// Completes the queue and waits for the worker to drain it. Note that
    /// flushing is terminal: no further events can be enqueued afterwards.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        // TryComplete instead of Complete: completing an already-completed
        // channel throws, and Flush and Dispose may both run.
        _channel.Writer.TryComplete();
        try
        {
            await _workerTask.ConfigureAwait(false);
        }
        catch { }
    }

    /// <summary>Stops the worker and releases the HTTP client.</summary>
    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _channel.Writer.TryComplete();
        _cts.Cancel();
        try
        {
            await _workerTask.ConfigureAwait(false);
        }
        catch { }
        _httpClient.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,69 @@
using EonaCat.LogStack.Core;
using System;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
/// <summary>
/// Sends log events as UDP datagrams to an SNMP trap receiver.
/// NOTE(review): the payload is a plain ASCII summary string, not a
/// BER-encoded SNMP PDU — confirm the receiver accepts free-form text traps.
/// </summary>
public class SnmpTrapFlow : FlowBase
{
    private readonly string _host;
    private readonly int _port;
    private readonly string _oid;
    private readonly UdpClient _udpClient;

    /// <param name="host">Trap receiver host. Must not be null.</param>
    /// <param name="port">Trap receiver UDP port (default 162, the standard trap port).</param>
    /// <param name="oid">OID string embedded in every trap message.</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    public SnmpTrapFlow(string host, int port = 162, string oid = "1.3.6.1.4.1.99999.1337.1.1.1", LogLevel minimumLevel = LogLevel.Trace) : base($"SNMP:{host}:{port}", minimumLevel)
    {
        _host = host ?? throw new ArgumentNullException(nameof(host));
        _port = port;
        _oid = oid;
        _udpClient = new UdpClient();
    }

    /// <summary>Sends the event immediately as a single UDP datagram.</summary>
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        var snmpTrapMessage = FormatSnmpTrapMessage(logEvent);
        var data = Encoding.ASCII.GetBytes(snmpTrapMessage);
        try
        {
            _udpClient.Send(data, data.Length, _host, _port);
            // Count successful sends, matching the bookkeeping of the other flows.
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        catch
        {
            Interlocked.Increment(ref DroppedCount);
            return Task.FromResult(WriteResult.Dropped);
        }
    }

    // Builds the single-line text payload for the datagram.
    private string FormatSnmpTrapMessage(LogEvent logEvent)
    {
        var stringBuilder = new StringBuilder();
        stringBuilder.Append($"Trap OID: {_oid}");
        stringBuilder.Append(" Timestamp: ").Append(logEvent.Timestamp);
        stringBuilder.Append(" Level: ").Append(logEvent.Level);
        stringBuilder.Append(" Message: ").Append(logEvent.Message);
        return stringBuilder.ToString();
    }

    public override Task FlushAsync(CancellationToken cancellationToken = default)
    {
        // SNMP traps are sent immediately, so no flushing needed.
        return Task.CompletedTask;
    }

    /// <summary>Releases the UDP socket.</summary>
    public override ValueTask DisposeAsync()
    {
        _udpClient?.Dispose();
        return base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,201 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
namespace EonaCat.LogStack.Flows
{
/// <summary>
/// Sends log events to a Splunk HTTP Event Collector (HEC) endpoint through a
/// bounded in-memory queue drained by a single background worker.
/// </summary>
public sealed class SplunkFlow : FlowBase
{
    private const int DefaultBatchSize = 256;
    private const int ChannelCapacity = 4096;
    private readonly Channel<LogEvent> _channel;
    private readonly Task _senderTask;
    private readonly CancellationTokenSource _cts;
    private readonly string _splunkUrl;
    private readonly string _token;
    private readonly string _sourcetype;
    private readonly string _hostName;
    private readonly BackpressureStrategy _backpressureStrategy;
    private readonly HttpClient _httpClient;

    /// <param name="splunkUrl">HEC endpoint URL. Must not be null.</param>
    /// <param name="token">HEC authorization token. Must not be null.</param>
    /// <param name="sourcetype">Splunk sourcetype attached to every event.</param>
    /// <param name="hostName">Host field; defaults to the machine name.</param>
    /// <param name="minimumLevel">Minimum level accepted by this flow.</param>
    /// <param name="backpressureStrategy">Behavior when the internal queue is full.</param>
    public SplunkFlow(
        string splunkUrl,
        string token,
        string sourcetype = "splunk_logs",
        string hostName = null,
        LogLevel minimumLevel = LogLevel.Trace,
        BackpressureStrategy backpressureStrategy = BackpressureStrategy.DropOldest)
        : base($"Splunk:{splunkUrl}", minimumLevel)
    {
        _splunkUrl = splunkUrl ?? throw new ArgumentNullException(nameof(splunkUrl));
        _token = token ?? throw new ArgumentNullException(nameof(token));
        _sourcetype = sourcetype;
        _hostName = hostName ?? Environment.MachineName;
        _backpressureStrategy = backpressureStrategy;
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            FullMode = backpressureStrategy switch
            {
                BackpressureStrategy.Wait => BoundedChannelFullMode.Wait,
                BackpressureStrategy.DropNewest => BoundedChannelFullMode.DropWrite,
                BackpressureStrategy.DropOldest => BoundedChannelFullMode.DropOldest,
                _ => BoundedChannelFullMode.Wait
            },
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _httpClient = new HttpClient();
        _httpClient.DefaultRequestHeaders.Add("Authorization", $"Splunk {_token}");
        _senderTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
    }

    /// <summary>Enqueues the event for asynchronous delivery.</summary>
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>
    /// Enqueues every eligible event of the batch; reports Dropped when at
    /// least one event could not be queued.
    /// </summary>
    public override Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled)
        {
            return Task.FromResult(WriteResult.FlowDisabled);
        }
        var result = WriteResult.Success;
        foreach (var logEvent in logEvents.Span)
        {
            if (!IsLogLevelEnabled(logEvent))
            {
                continue;
            }
            if (_channel.Writer.TryWrite(logEvent))
            {
                Interlocked.Increment(ref BlastedCount);
            }
            else
            {
                Interlocked.Increment(ref DroppedCount);
                result = WriteResult.Dropped;
            }
        }
        return Task.FromResult(result);
    }

    // Background worker: drains the channel in batches. Exits when cancelled
    // or when the writer side has been completed (Flush/Dispose) and the
    // queue is fully drained — instead of spinning forever on the completed
    // channel, which previously made FlushAsync hang while burning CPU.
    private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                // false means the channel is completed AND empty.
                bool more = await _channel.Reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false);
                while (_channel.Reader.TryRead(out var logEvent))
                {
                    batch.Add(logEvent);
                    if (batch.Count >= DefaultBatchSize)
                    {
                        await SendBatchAsync(batch, cancellationToken).ConfigureAwait(false);
                        batch.Clear();
                    }
                }
                if (batch.Count > 0)
                {
                    await SendBatchAsync(batch, cancellationToken).ConfigureAwait(false);
                    batch.Clear();
                }
                if (!more)
                {
                    break;
                }
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine($"SplunkFlow error: {ex.Message}");
                // Back off briefly before retrying; bail out if cancelled meanwhile.
                try { await Task.Delay(1000, cancellationToken).ConfigureAwait(false); }
                catch (OperationCanceledException) { break; }
            }
        }
    }

    // Posts each event as a separate HEC envelope. Delivery is best-effort:
    // HTTP errors are swallowed, but content and response are disposed.
    private async Task SendBatchAsync(List<LogEvent> batch, CancellationToken cancellationToken)
    {
        foreach (var logEvent in batch)
        {
            var splunkEvent = new
            {
                time = ToUnixTimeSeconds(LogEvent.GetDateTime(logEvent.Timestamp)),
                host = _hostName,
                sourcetype = _sourcetype,
                @event = new
                {
                    level = logEvent.Level.ToString(),
                    category = logEvent.Category,
                    message = logEvent.Message
                }
            };
            string json = JsonHelper.ToJson(splunkEvent);
            using var content = new StringContent(json, Encoding.UTF8, "application/json");
            try
            {
                using var response = await _httpClient.PostAsync(_splunkUrl, content, cancellationToken).ConfigureAwait(false);
            }
            catch
            {
                // ignore errors
            }
        }
    }

    // Helper to convert DateTime to Unix timestamp (works in .NET 4.8.x)
    private static double ToUnixTimeSeconds(DateTime dt)
    {
        var utc = dt.ToUniversalTime();
        var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
        return (utc - epoch).TotalSeconds;
    }

    /// <summary>
    /// Completes the queue and waits for the worker to drain it. Note that
    /// flushing is terminal: no further events can be enqueued afterwards.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        // TryComplete instead of Complete: completing an already-completed
        // channel throws, and Flush and Dispose may both run.
        _channel.Writer.TryComplete();
        try { await _senderTask.ConfigureAwait(false); } catch { }
    }

    /// <summary>Stops the worker and releases the HTTP client.</summary>
    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _channel.Writer.TryComplete();
        _cts.Cancel();
        try { await _senderTask.ConfigureAwait(false); } catch { }
        _httpClient.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,320 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.Flows;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.NetworkInformation;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace ServiceMonitoring
{
/// <summary>Probe type used to check a monitored service.</summary>
public enum ServiceType
{
    TCP = 0,  // TCP connect probe
    UDP,      // UDP send/receive probe
    HTTP,     // HTTP HEAD request
    HTTPS,    // HTTPS HEAD request with certificate inspection
    File      // local file existence check
}
/// <summary>Mutable record of one monitored service and its last known state.</summary>
public class ServiceStatus
{
    /// <summary>Display name used in the status log.</summary>
    public string ServiceName { get; set; }
    /// <summary>Host name, URL host, or file path (for <see cref="ServiceType.File"/>).</summary>
    public string Host { get; set; }
    /// <summary>Port used by TCP/UDP probes; unused for HTTP(S) and File.</summary>
    public int Port { get; set; }
    /// <summary>Last observed state: "Available" or "Unavailable" (null until first transition).</summary>
    public string Status { get; set; }
    /// <summary>UTC time of the last recorded status change.</summary>
    public DateTime LastChecked { get; set; }
    /// <summary>Which probe to run against this service.</summary>
    public ServiceType ServiceType { get; set; }
    /// <summary>Extra diagnostics, e.g. certificate details or error text from the HTTPS probe.</summary>
    public string AdditionalInfo { get; set; }
}
/// <summary>
/// Periodically probes the configured services and logs status transitions to
/// a rolling text file. This flow produces log output itself and does not
/// accept events from the logging pipeline.
/// </summary>
public sealed class StatusFlow : FlowBase
{
    private readonly List<ServiceStatus> _servicesToMonitor;
    private readonly TimeSpan _checkInterval;
    private readonly string _statusDirectory;
    private readonly CancellationTokenSource _cts;
    private readonly Action<ServiceStatus> _statusChangeTrigger;

    /// <summary>
    /// Log fileSize (default: 10 MB)
    /// </summary>
    public int MaxLogFileSize { get; set; } = 10 * 1024 * 1024; // 10 MB

    /// <summary>
    /// Max Log files (default: 5)
    /// </summary>
    public int MaxLogFiles { get; set; } = 5;

    /// <summary>
    /// Default interval in minutes (default: 5)
    /// </summary>
    public int DefaultIntervalCheckInMinutes { get; set; } = 5;

    /// <param name="servicesToMonitor">Services to probe on every cycle.</param>
    /// <param name="checkInterval">Probe interval; defaults to <see cref="DefaultIntervalCheckInMinutes"/>.</param>
    /// <param name="statusDirectory">Directory for the status log; "./logs/status" when blank.</param>
    /// <param name="statusChangeTrigger">Optional callback invoked on every status transition.</param>
    /// <param name="minimumLevel">Minimum level (unused: this flow rejects pipeline events).</param>
    public StatusFlow(
        List<ServiceStatus> servicesToMonitor,
        TimeSpan? checkInterval,
        string statusDirectory,
        Action<ServiceStatus> statusChangeTrigger,
        LogLevel minimumLevel = LogLevel.Trace
    ) : base("StatusFlow", minimumLevel)
    {
        _servicesToMonitor = servicesToMonitor;
        if (checkInterval == null)
        {
            checkInterval = TimeSpan.FromMinutes(DefaultIntervalCheckInMinutes);
        }
        _checkInterval = checkInterval.Value;
        _statusChangeTrigger = statusChangeTrigger;
        if (string.IsNullOrWhiteSpace(statusDirectory))
        {
            statusDirectory = "./logs/status";
        }
        // Resolve "./" relative to the application base directory.
        if (statusDirectory.StartsWith("./", StringComparison.Ordinal))
        {
            statusDirectory = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, statusDirectory.Substring(2));
        }
        Directory.CreateDirectory(statusDirectory);
        _statusDirectory = statusDirectory;
        _cts = new CancellationTokenSource();
        StartMonitoring();
    }

    /// <summary>Starts the background monitoring loop.</summary>
    public void StartMonitoring()
    {
        Task.Run(async () =>
        {
            try
            {
                while (IsEnabled && !_cts.Token.IsCancellationRequested)
                {
                    await MonitorServicesAsync();
                    await Task.Delay(_checkInterval, _cts.Token);
                }
            }
            catch (OperationCanceledException)
            {
                // Normal shutdown: DisposeAsync cancelled the token during the delay.
                // Catching here avoids an unobserved task exception.
            }
        });
    }

    // Probes every service once and reacts only to availability transitions.
    private async Task MonitorServicesAsync()
    {
        foreach (var service in _servicesToMonitor)
        {
            bool isServiceAvailable = service.ServiceType switch
            {
                ServiceType.TCP => await IsTcpServiceAvailableAsync(service.Host, service.Port),
                ServiceType.UDP => await IsUdpServiceAvailableAsync(service.Host, service.Port),
                ServiceType.HTTP => await IsHttpServiceAvailableAsync(service.Host),
                ServiceType.HTTPS => await IsHttpsServiceAvailableAsync(service),
                ServiceType.File => await IsFileAvailableAsync(service.Host),
                _ => false
            };
            // Only act on state transitions, not on every probe.
            if (isServiceAvailable != (service.Status == "Available"))
            {
                service.Status = isServiceAvailable ? "Available" : "Unavailable";
                service.LastChecked = DateTime.UtcNow;
                // Trigger action when service status changes
                _statusChangeTrigger?.Invoke(service);
                // Log the status
                LogServiceStatusToFile(service);
            }
        }
    }

    /// <summary>This flow does not accept pipeline events.</summary>
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.NoBlastZone);
    }

    /// <summary>This flow does not accept pipeline events.</summary>
    public override Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
    {
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.NoBlastZone);
    }

    // TCP probe: the service is "available" when a connection can be opened.
    private async Task<bool> IsTcpServiceAvailableAsync(string host, int port)
    {
        try
        {
            using (var tcpClient = new TcpClient())
            {
                await tcpClient.ConnectAsync(host, port);
                return true;
            }
        }
        catch
        {
            return false;
        }
    }

    // UDP probe: sends "ping" and waits up to 5 s for any reply. Only works
    // against services that actually echo something back.
    private async Task<bool> IsUdpServiceAvailableAsync(string host, int port)
    {
        try
        {
            using (var udpClient = new UdpClient())
            {
                var timeout = TimeSpan.FromSeconds(5);
                var message = Encoding.ASCII.GetBytes("ping");
                var sendTask = udpClient.SendAsync(message, message.Length, host, port);
                var completedTask = await Task.WhenAny(sendTask, Task.Delay(timeout));
                if (completedTask == sendTask)
                {
                    var receiveTask = udpClient.ReceiveAsync();
                    var completedReceiveTask = await Task.WhenAny(receiveTask, Task.Delay(timeout));
                    return completedReceiveTask == receiveTask;
                }
                return false;
            }
        }
        catch
        {
            return false;
        }
    }

    // HTTP probe via a HEAD request; any non-throwing response counts as up.
    // NOTE(review): HttpWebRequest is the legacy API — consider HttpClient,
    // which this class already uses for HTTPS.
    private async Task<bool> IsHttpServiceAvailableAsync(string host)
    {
        try
        {
            var request = (HttpWebRequest)WebRequest.Create($"http://{host}");
            request.Method = "HEAD";
            using (var response = await request.GetResponseAsync())
            {
                return true;
            }
        }
        catch
        {
            return false;
        }
    }

    /// <summary>
    /// HTTPS probe via a HEAD request. Certificate problems fail the probe and
    /// are captured (subject, issuer, expiry, chain) in AdditionalInfo.
    /// </summary>
    public async Task<bool> IsHttpsServiceAvailableAsync(ServiceStatus service)
    {
        try
        {
            var handler = new HttpClientHandler
            {
                ServerCertificateCustomValidationCallback = (sender, cert, chain, sslPolicyErrors) =>
                {
                    if (sslPolicyErrors != System.Net.Security.SslPolicyErrors.None)
                    {
                        service.AdditionalInfo = $"Certificate Subject: {cert.Subject}\n" +
                                                 $"Certificate Issuer: {cert.Issuer}\n" +
                                                 $"Certificate Expiry: {cert.GetExpirationDateString()}\n" +
                                                 $"SSL Policy Errors: {sslPolicyErrors}\n";
                        foreach (var chainElement in chain.ChainElements)
                        {
                            service.AdditionalInfo += $"Chain Element: {chainElement.Certificate.Subject}, {chainElement.Certificate.Issuer}\n";
                        }
                    }
                    return sslPolicyErrors == System.Net.Security.SslPolicyErrors.None;
                }
            };
            using (var client = new HttpClient(handler))
            {
                var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Head, $"https://{service.Host}"));
                return response.IsSuccessStatusCode;
            }
        }
        catch (Exception ex)
        {
            service.AdditionalInfo = $"Error: {ex.Message}";
            return false;
        }
    }

    // File "availability" is an existence check; kept Task-shaped so it fits
    // the awaited switch in MonitorServicesAsync.
    private Task<bool> IsFileAvailableAsync(string filePath)
    {
        try
        {
            return Task.FromResult(File.Exists(filePath));
        }
        catch
        {
            return Task.FromResult(false);
        }
    }

    // Appends a human-readable transition line to status_log.txt,
    // rolling the file over first when it exceeds MaxLogFileSize.
    private void LogServiceStatusToFile(ServiceStatus service)
    {
        string statusMessage = $"{service.ServiceName} ({service.Host}:{service.Port}) - Status: {service.Status}, Last Checked: {service.LastChecked}";
        if (!string.IsNullOrEmpty(service.AdditionalInfo))
        {
            statusMessage += $"\nAdditional Info: {service.AdditionalInfo}";
        }
        RollOverLogFileIfNeeded();
        try
        {
            string logFilePath = Path.Combine(_statusDirectory, "status_log.txt");
            using (var writer = new StreamWriter(logFilePath, append: true))
            {
                writer.WriteLine(statusMessage);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine($"StatusFlow: Error writing to file: {ex.Message}");
        }
    }

    // Renames an oversized status_log.txt to a timestamped archive and deletes
    // the oldest archive when more than MaxLogFiles exist.
    private void RollOverLogFileIfNeeded()
    {
        try
        {
            string logFilePath = Path.Combine(_statusDirectory, "status_log.txt");
            if (File.Exists(logFilePath) && new FileInfo(logFilePath).Length > MaxLogFileSize)
            {
                var logFiles = Directory.GetFiles(_statusDirectory, "status_log_*.txt").OrderBy(f => f).ToList();
                if (logFiles.Count >= MaxLogFiles)
                {
                    File.Delete(logFiles[0]);
                    logFiles.RemoveAt(0);
                }
                string newLogFilePath = Path.Combine(_statusDirectory, $"status_log_{DateTime.UtcNow:yyyyMMdd_HHmmss}.txt");
                File.Move(logFilePath, newLogFilePath);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine($"StatusFlow: Error handling log file rollover: {ex.Message}");
        }
    }

    /// <summary>Stops the monitoring loop.</summary>
    public override async ValueTask DisposeAsync()
    {
        _cts.Cancel();
        await base.DisposeAsync();
    }

    /// <summary>Status lines are written immediately; flushing is a no-op.</summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default)
    {
        return Task.CompletedTask;
    }
}
}

View File

@@ -0,0 +1,267 @@
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Security;
using System.Net.Sockets;
using System.Runtime.CompilerServices;
using System.Security.Authentication;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class SyslogTcpFlow : FlowBase
{
private const int DefaultBatchSize = 256;
private const int ChannelCapacity = 4096;
private readonly Channel<LogEvent> _channel;
private readonly Task _senderTask;
private readonly CancellationTokenSource _cts;
private readonly string _host;
private readonly int _port;
private TcpClient? _tcpClient;
private Stream _stream;
private readonly bool _useTls;
private readonly RemoteCertificateValidationCallback _certValidationCallback;
private readonly X509CertificateCollection _clientCertificates;
private readonly BackpressureStrategy _backpressureStrategy;
/// <summary>
/// Creates a TCP (optionally TLS) syslog flow backed by a bounded in-memory
/// queue that a single background worker drains.
/// </summary>
/// <param name="host">Syslog server host. Must not be null.</param>
/// <param name="port">Syslog server port (default 514).</param>
/// <param name="minimumLevel">Minimum level accepted by this flow.</param>
/// <param name="backpressureStrategy">Behavior when the internal queue is full.</param>
/// <param name="useTls">When true the TCP stream is wrapped in TLS.</param>
/// <param name="certValidationCallback">Custom server-certificate validation; a default is used when null.</param>
/// <param name="clientCertificates">Optional client certificates for mutual TLS.</param>
public SyslogTcpFlow(
    string host,
    int port = 514,
    LogLevel minimumLevel = LogLevel.Trace,
    BackpressureStrategy backpressureStrategy = BackpressureStrategy.DropOldest,
    bool useTls = false,
    RemoteCertificateValidationCallback certValidationCallback = null,
    X509CertificateCollection clientCertificates = null)
    : base($"SyslogTCP:{host}:{port}", minimumLevel)
{
    _host = host ?? throw new ArgumentNullException(nameof(host));
    _port = port;
    _backpressureStrategy = backpressureStrategy;
    _useTls = useTls;
    _certValidationCallback = certValidationCallback ?? DefaultCertificateValidation;
    _clientCertificates = clientCertificates;
    // Map the flow-level backpressure strategy onto the channel's full-mode;
    // anything unrecognized falls back to dropping the oldest entries.
    var options = new BoundedChannelOptions(ChannelCapacity)
    {
        FullMode = backpressureStrategy switch
        {
            BackpressureStrategy.Wait => BoundedChannelFullMode.Wait,
            BackpressureStrategy.DropNewest => BoundedChannelFullMode.DropWrite,
            _ => BoundedChannelFullMode.DropOldest,
        },
        SingleReader = true,
        SingleWriter = false
    };
    _channel = Channel.CreateBounded<LogEvent>(options);
    _cts = new CancellationTokenSource();
    _senderTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
{
if (!IsEnabled || !IsLogLevelEnabled(logEvent))
{
return Task.FromResult(WriteResult.LevelFiltered);
}
if (_channel.Writer.TryWrite(logEvent))
{
Interlocked.Increment(ref BlastedCount);
return Task.FromResult(WriteResult.Success);
}
Interlocked.Increment(ref DroppedCount);
return Task.FromResult(WriteResult.Dropped);
}
public override async Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
{
if (!IsEnabled)
{
return WriteResult.FlowDisabled;
}
var result = WriteResult.Success;
foreach (var logEvent in logEvents.Span)
{
if (!IsLogLevelEnabled(logEvent))
{
continue;
}
if (_channel.Writer.TryWrite(logEvent))
{
Interlocked.Increment(ref BlastedCount);
}
else
{
Interlocked.Increment(ref DroppedCount);
result = WriteResult.Dropped;
}
}
return result;
}
private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
{
var batch = new List<LogEvent>(DefaultBatchSize);
var sb = new StringBuilder(8192);
while (!cancellationToken.IsCancellationRequested)
{
try
{
await EnsureConnectedAsync(cancellationToken);
await foreach (var logEvent in _channel.Reader.ReadAllAsync(cancellationToken))
{
batch.Add(logEvent);
if (batch.Count >= DefaultBatchSize || _channel.Reader.Count == 0)
{
await SendBatchAsync(batch, sb, cancellationToken);
batch.Clear();
sb.Clear();
}
}
}
catch (Exception ex)
{
Console.Error.WriteLine($"SyslogTcpFlow error: {ex.Message}");
await Task.Delay(1000, cancellationToken);
_tcpClient?.Dispose();
_tcpClient = null;
}
}
}
private async Task EnsureConnectedAsync(CancellationToken cancellationToken)
{
if (_tcpClient != null && _tcpClient.Connected)
{
return;
}
if (_stream != null)
{
_stream.Dispose();
_stream = null;
}
if (_tcpClient != null)
{
_tcpClient.Dispose();
_tcpClient = null;
}
_tcpClient = new TcpClient();
_tcpClient.NoDelay = true;
await _tcpClient.ConnectAsync(_host, _port).ConfigureAwait(false);
var networkStream = _tcpClient.GetStream();
if (_useTls)
{
var sslStream = new SslStream(
networkStream,
false,
_certValidationCallback);
sslStream.AuthenticateAsClient(
_host,
_clientCertificates,
SslProtocols.Tls12,
checkCertificateRevocation: true);
_stream = sslStream;
}
else
{
_stream = networkStream;
}
}
private static bool DefaultCertificateValidation(object sender, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors)
{
return sslPolicyErrors == SslPolicyErrors.None;
}
private async Task SendBatchAsync(List<LogEvent> batch, StringBuilder sb, CancellationToken cancellationToken)
{
foreach (var logEvent in batch)
{
FormatSyslogEvent(logEvent, sb);
sb.AppendLine();
}
if (_stream != null)
{
var data = Encoding.UTF8.GetBytes(sb.ToString());
await _stream.WriteAsync(data, 0, data.Length, cancellationToken).ConfigureAwait(false);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void FormatSyslogEvent(LogEvent logEvent, StringBuilder sb)
{
// Simple RFC 3164-style format: <PRI>timestamp hostname tag: message
// Here we use facility=1 (user-level messages) and map severity from log level
int severity = logEvent.Level switch
{
LogLevel.Trace => 7,
LogLevel.Debug => 7,
LogLevel.Information => 6,
LogLevel.Warning => 4,
LogLevel.Error => 3,
LogLevel.Critical => 2,
_ => 6
};
int facility = 1; // user-level messages
int pri = facility * 8 + severity;
var dt = LogEvent.GetDateTime(logEvent.Timestamp);
sb.Append('<').Append(pri).Append('>');
sb.Append(dt.ToString("MMM dd HH:mm:ss")); // RFC 3164 timestamp
sb.Append(" ").Append(Environment.MachineName);
sb.Append(" ").Append(string.IsNullOrEmpty(logEvent.Category) ? "SyslogTcpFlow" : logEvent.Category);
sb.Append(": ").Append(logEvent.Message);
}
public override async Task FlushAsync(CancellationToken cancellationToken = default)
{
_channel.Writer.Complete();
try { await _senderTask.ConfigureAwait(false); } catch { }
}
public override async ValueTask DisposeAsync()
{
IsEnabled = false;
_channel.Writer.Complete();
_cts.Cancel();
try { await _senderTask.ConfigureAwait(false); } catch { }
_stream?.Dispose();
_tcpClient?.Dispose();
_cts.Dispose();
await base.DisposeAsync();
}
}
}

View File

@@ -0,0 +1,235 @@
using EonaCat.LogStack.Core;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Net.Sockets;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class SyslogUdpFlow : FlowBase
{
    private const int DefaultBatchSize = 256;
    private const int ChannelCapacity = 4096;
    private const int MaxUdpPacketSize = 4096;

    private readonly Channel<LogEvent> _channel;
    private readonly Task _senderTask;
    private readonly CancellationTokenSource _cts;
    private readonly string _host;
    private readonly int _port;
    private UdpClient? _udpClient;
    private readonly BackpressureStrategy _backpressureStrategy;

    /// <summary>
    /// Flow that forwards log events to a syslog server over UDP using an
    /// RFC 3164-style line format. Events are queued in a bounded channel and
    /// sent in batches by a background task. Delivery is best-effort.
    /// </summary>
    /// <param name="host">Syslog server host name or address.</param>
    /// <param name="port">Syslog server port (default 514).</param>
    /// <param name="minimumLevel">Minimum log level this flow accepts.</param>
    /// <param name="backpressureStrategy">What to do when the internal queue is full.</param>
    public SyslogUdpFlow(
        string host,
        int port = 514,
        LogLevel minimumLevel = LogLevel.Trace,
        BackpressureStrategy backpressureStrategy = BackpressureStrategy.DropOldest)
        : base($"SyslogUDP:{host}:{port}", minimumLevel)
    {
        _host = host ?? throw new ArgumentNullException(nameof(host));
        _port = port;
        _backpressureStrategy = backpressureStrategy;
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            FullMode = backpressureStrategy switch
            {
                BackpressureStrategy.Wait => BoundedChannelFullMode.Wait,
                BackpressureStrategy.DropNewest => BoundedChannelFullMode.DropWrite,
                BackpressureStrategy.DropOldest => BoundedChannelFullMode.DropOldest,
                _ => BoundedChannelFullMode.Wait
            },
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _udpClient = new UdpClient();
        _senderTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
    }

    /// <summary>
    /// Enqueues one event for asynchronous delivery. Never blocks: a full channel
    /// (in drop modes) means the event is counted as dropped.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>
    /// Enqueues a batch of events. Returns Dropped if any event could not be queued.
    /// </summary>
    // Not async: nothing here awaits, so avoid the state-machine (and CS1998).
    public override Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled)
        {
            return Task.FromResult(WriteResult.FlowDisabled);
        }
        var result = WriteResult.Success;
        foreach (var logEvent in logEvents.Span)
        {
            if (!IsLogLevelEnabled(logEvent))
            {
                continue;
            }
            if (_channel.Writer.TryWrite(logEvent))
            {
                Interlocked.Increment(ref BlastedCount);
            }
            else
            {
                Interlocked.Increment(ref DroppedCount);
                result = WriteResult.Dropped;
            }
        }
        return Task.FromResult(result);
    }

    /// <summary>
    /// Background sender: drains the channel in batches. Exits when the channel is
    /// completed (Flush/Dispose) or the token is cancelled; transient errors retry
    /// after a short delay.
    /// </summary>
    private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        var sb = new StringBuilder(8192);
        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                await foreach (var logEvent in _channel.Reader.ReadAllAsync(cancellationToken))
                {
                    batch.Add(logEvent);
                    // Flush when the batch is full or the queue momentarily empties.
                    if (batch.Count >= DefaultBatchSize || _channel.Reader.Count == 0)
                    {
                        await SendBatchAsync(batch, sb, cancellationToken);
                        batch.Clear();
                        sb.Clear();
                    }
                }
                // ReadAllAsync only completes when the writer is completed:
                // send any remainder and exit instead of spinning on a closed channel
                // (the old loop re-entered forever, so FlushAsync could hang).
                if (batch.Count > 0)
                {
                    await SendBatchAsync(batch, sb, CancellationToken.None);
                    batch.Clear();
                }
                break;
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine($"SyslogUdpFlow error: {ex.Message}");
                try { await Task.Delay(500, cancellationToken); }
                catch (OperationCanceledException) { break; }
            }
        }
    }

    /// <summary>
    /// Formats the batch into newline-separated syslog lines and sends it as a
    /// single datagram, or in chunks when it exceeds MaxUdpPacketSize.
    /// </summary>
    private async Task SendBatchAsync(List<LogEvent> batch, StringBuilder sb, CancellationToken cancellationToken)
    {
        foreach (var logEvent in batch)
        {
            FormatSyslogEvent(logEvent, sb);
            sb.AppendLine();
        }
        if (_udpClient == null)
        {
            return;
        }
        var data = Encoding.UTF8.GetBytes(sb.ToString());
        if (data.Length <= MaxUdpPacketSize)
        {
            try
            {
                await _udpClient.SendAsync(data, data.Length, _host, _port);
            }
            catch
            {
                // UDP send errors are ignored
            }
        }
        else
        {
            await SendUdpInChunksAsync(data, MaxUdpPacketSize, cancellationToken);
        }
    }

    /// <summary>
    /// Sends oversized payloads as fixed-size datagrams.
    /// NOTE(review): chunk boundaries are byte offsets, not message boundaries, so a
    /// syslog line can be split across datagrams — confirm the receiver tolerates this.
    /// </summary>
    private async Task SendUdpInChunksAsync(byte[] data, int chunkSize, CancellationToken cancellationToken)
    {
        int offset = 0;
        byte[] buffer = ArrayPool<byte>.Shared.Rent(chunkSize);
        try
        {
            while (offset < data.Length)
            {
                int size = Math.Min(chunkSize, data.Length - offset);
                Buffer.BlockCopy(data, offset, buffer, 0, size);
                try
                {
                    await _udpClient.SendAsync(buffer, size, _host, _port);
                }
                catch
                {
                    // Best-effort UDP: ignore send errors here too, matching the
                    // single-datagram path, so one failure doesn't abort the batch.
                }
                offset += size;
            }
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private void FormatSyslogEvent(LogEvent logEvent, StringBuilder sb)
    {
        // RFC 3164-style: <PRI>timestamp hostname tag: message, facility fixed at 1.
        int severity = logEvent.Level switch
        {
            LogLevel.Trace => 7,
            LogLevel.Debug => 7,
            LogLevel.Information => 6,
            LogLevel.Warning => 4,
            LogLevel.Error => 3,
            LogLevel.Critical => 2,
            _ => 6
        };
        int facility = 1;
        int pri = facility * 8 + severity;
        var dt = LogEvent.GetDateTime(logEvent.Timestamp);
        sb.Append('<').Append(pri).Append('>');
        sb.Append(dt.ToString("MMM dd HH:mm:ss"));
        sb.Append(" ").Append(Environment.MachineName);
        sb.Append(" ").Append(string.IsNullOrEmpty(logEvent.Category) ? "SyslogUdpFlow" : logEvent.Category);
        sb.Append(": ").Append(logEvent.Message);
    }

    /// <summary>
    /// Completes the channel and waits for the sender to drain. This permanently
    /// stops intake (original behavior). TryComplete is idempotent, so Flush
    /// followed by Dispose no longer throws.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        _channel.Writer.TryComplete();
        try { await _senderTask.ConfigureAwait(false); } catch { }
    }

    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _channel.Writer.TryComplete();
        _cts.Cancel();
        try { await _senderTask.ConfigureAwait(false); } catch { }
        _udpClient?.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,286 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.Flows;
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Security;
using System.Net.Sockets;
using System.Runtime.CompilerServices;
using System.Security.Authentication;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows;
public sealed class TcpFlow : FlowBase
{
    private const int DefaultBatchSize = 256;
    private const int ChannelCapacity = 4096;

    private readonly Channel<LogEvent> _channel;
    private readonly Task _senderTask;
    private readonly CancellationTokenSource _cts;
    private readonly string _host;
    private readonly int _port;
    private TcpClient? _tcpClient;
    private Stream _stream;
    private readonly bool _useTls;
    private readonly RemoteCertificateValidationCallback _certValidationCallback;
    private readonly X509CertificateCollection _clientCertificates;
    private readonly BackpressureStrategy _backpressureStrategy;

    /// <summary>
    /// Flow that streams formatted log lines to a TCP endpoint (optionally TLS).
    /// Events are queued in a bounded channel and written in batches by a
    /// background sender task with automatic reconnection.
    /// </summary>
    /// <param name="host">Remote host name or address.</param>
    /// <param name="port">Remote port.</param>
    /// <param name="minimumLevel">Minimum log level this flow accepts.</param>
    /// <param name="backpressureStrategy">What to do when the internal queue is full.</param>
    /// <param name="useTls">If true, wraps the connection in TLS 1.2.</param>
    /// <param name="certValidationCallback">Optional server certificate validator; defaults to strict validation.</param>
    /// <param name="clientCertificates">Optional client certificates for mutual TLS.</param>
    public TcpFlow(
        string host,
        int port,
        LogLevel minimumLevel = LogLevel.Trace,
        BackpressureStrategy backpressureStrategy = BackpressureStrategy.DropOldest,
        bool useTls = false,
        RemoteCertificateValidationCallback certValidationCallback = null,
        X509CertificateCollection clientCertificates = null)
        : base($"TCP:{host}:{port}", minimumLevel)
    {
        _host = host ?? throw new ArgumentNullException(nameof(host));
        _port = port;
        _backpressureStrategy = backpressureStrategy;
        _useTls = useTls;
        _certValidationCallback = certValidationCallback ?? DefaultCertificateValidation;
        _clientCertificates = clientCertificates;
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            // Same mapping as the original nested ternary: anything that is not
            // Wait or DropNewest falls back to DropOldest.
            FullMode = backpressureStrategy switch
            {
                BackpressureStrategy.Wait => BoundedChannelFullMode.Wait,
                BackpressureStrategy.DropNewest => BoundedChannelFullMode.DropWrite,
                _ => BoundedChannelFullMode.DropOldest
            },
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _senderTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
    }

    /// <summary>
    /// Enqueues one event for asynchronous delivery. Never blocks: a full channel
    /// (in drop modes) means the event is counted as dropped.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>
    /// Streams the raw contents of a file over the same TCP connection.
    /// NOTE(review): this writes to _stream directly and may interleave with the
    /// background sender task's batch writes — confirm callers serialize usage.
    /// </summary>
    /// <param name="filePath">Path of the file to send; must exist.</param>
    /// <returns>Success, or Failed on missing file / transport error.</returns>
    public async Task<WriteResult> SendFileAsync(string filePath, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled)
        {
            return WriteResult.FlowDisabled;
        }
        if (string.IsNullOrWhiteSpace(filePath) || !File.Exists(filePath))
        {
            return WriteResult.Failed;
        }
        try
        {
            // Ensure TCP connection
            await EnsureConnectedAsync(cancellationToken);
            // Send file in chunks
            using (var fileStream = new FileStream(filePath, FileMode.Open, FileAccess.Read))
            {
                byte[] buffer = new byte[4096];
                int bytesRead;
                while ((bytesRead = await fileStream.ReadAsync(buffer, 0, buffer.Length, cancellationToken)) > 0)
                {
                    await _stream.WriteAsync(buffer, 0, bytesRead, cancellationToken);
                    await _stream.FlushAsync(cancellationToken);
                }
            }
            return WriteResult.Success;
        }
        catch (Exception exception)
        {
            Console.Error.WriteLine($"TcpFlow error: Error while sending file: {exception.Message}");
            return WriteResult.Failed;
        }
    }

    /// <summary>Strict default validation: only accept certificates with no policy errors.</summary>
    private static bool DefaultCertificateValidation(
        object sender,
        X509Certificate certificate,
        X509Chain chain,
        SslPolicyErrors sslPolicyErrors)
    {
        return sslPolicyErrors == SslPolicyErrors.None;
    }

    /// <summary>
    /// Enqueues a batch of events. Returns Dropped if any event could not be queued.
    /// </summary>
    // Not async: nothing here awaits, so avoid the state-machine (and CS1998).
    public override Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled)
        {
            return Task.FromResult(WriteResult.FlowDisabled);
        }
        var result = WriteResult.Success;
        foreach (var logEvent in logEvents.Span)
        {
            if (!IsLogLevelEnabled(logEvent))
            {
                continue;
            }
            if (_channel.Writer.TryWrite(logEvent))
            {
                Interlocked.Increment(ref BlastedCount);
            }
            else
            {
                Interlocked.Increment(ref DroppedCount);
                result = WriteResult.Dropped;
            }
        }
        return Task.FromResult(result);
    }

    /// <summary>
    /// Background sender: drains the channel, batching writes and reconnecting
    /// with a 1 s delay after transport errors. Exits when the channel completes
    /// (Flush/Dispose) or the token is cancelled.
    /// </summary>
    private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        var sb = new StringBuilder(8192);
        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                await EnsureConnectedAsync(cancellationToken);
                await foreach (var logEvent in _channel.Reader.ReadAllAsync(cancellationToken))
                {
                    batch.Add(logEvent);
                    // Flush when the batch is full or the queue momentarily empties.
                    if (batch.Count >= DefaultBatchSize || _channel.Reader.Count == 0)
                    {
                        await SendBatchAsync(batch, sb, cancellationToken);
                        batch.Clear();
                        sb.Clear();
                    }
                }
                // ReadAllAsync only completes when the writer is completed:
                // send any remainder and exit instead of spinning on a closed channel
                // (the old loop re-entered forever, so FlushAsync could hang).
                if (batch.Count > 0)
                {
                    await SendBatchAsync(batch, sb, CancellationToken.None);
                    batch.Clear();
                }
                break;
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine($"TcpFlow error: {ex.Message}");
                // Dispose both stream and client so the next iteration reconnects
                // cleanly (the old code leaked the stream).
                _stream?.Dispose();
                _stream = null;
                _tcpClient?.Dispose();
                _tcpClient = null;
                try { await Task.Delay(1000, cancellationToken); } // retry after delay
                catch (OperationCanceledException) { break; }
            }
        }
    }

    /// <summary>
    /// Ensures a live TCP (and, if configured, TLS) connection, rebuilding it if
    /// the previous one dropped.
    /// </summary>
    private async Task EnsureConnectedAsync(CancellationToken cancellationToken)
    {
        if (_tcpClient != null && _tcpClient.Connected)
        {
            return;
        }
        _stream?.Dispose();
        _tcpClient?.Dispose();
        _tcpClient = null;
        _tcpClient = new TcpClient { NoDelay = true }; // lower latency
        await _tcpClient.ConnectAsync(_host, _port).ConfigureAwait(false);
        var networkStream = _tcpClient.GetStream();
        if (_useTls)
        {
            var sslStream = new SslStream(
                networkStream,
                false,
                _certValidationCallback);
            await sslStream.AuthenticateAsClientAsync(
                _host,
                _clientCertificates,
                SslProtocols.Tls12,
                checkCertificateRevocation: true).ConfigureAwait(false);
            _stream = sslStream;
        }
        else
        {
            _stream = networkStream;
        }
    }

    /// <summary>Formats the batch into newline-framed lines and writes them to the stream.</summary>
    private async Task SendBatchAsync(List<LogEvent> batch, StringBuilder sb, CancellationToken cancellationToken)
    {
        foreach (var logEvent in batch)
        {
            FormatLogEvent(logEvent, sb);
            sb.AppendLine();
        }
        if (_stream != null)
        {
            var data = Encoding.UTF8.GetBytes(sb.ToString());
            await _stream.WriteAsync(data, 0, data.Length, cancellationToken).ConfigureAwait(false);
        }
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private void FormatLogEvent(LogEvent logEvent, StringBuilder sb)
    {
        // "yyyy-MM-dd HH:mm:ss.fff [LEVEL] Category: Message"
        var dt = LogEvent.GetDateTime(logEvent.Timestamp);
        sb.Append(dt.ToString("yyyy-MM-dd HH:mm:ss.fff"));
        sb.Append(" [");
        sb.Append(logEvent.Level.ToString().ToUpperInvariant());
        sb.Append("] ");
        if (!string.IsNullOrEmpty(logEvent.Category))
        {
            sb.Append(logEvent.Category);
            sb.Append(": ");
        }
        sb.Append(logEvent.Message);
    }

    /// <summary>
    /// Completes the channel and waits for the sender to drain. This permanently
    /// stops intake (original behavior). TryComplete is idempotent, so Flush
    /// followed by Dispose no longer throws.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        _channel.Writer.TryComplete();
        try { await _senderTask.ConfigureAwait(false); } catch { }
    }

    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _channel.Writer.TryComplete();
        _cts.Cancel();
        try { await _senderTask.ConfigureAwait(false); } catch { }
        _stream?.Dispose();
        _tcpClient?.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}

View File

@@ -0,0 +1,181 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// logging flow that sends messages to a Telegram chat via a bot.
/// </summary>
public sealed class TelegramFlow : FlowBase, IAsyncDisposable
{
    private const int ChannelCapacity = 4096;
    private const int DefaultBatchSize = 5;

    private readonly Channel<LogEvent> _channel;
    private readonly Task _workerTask;
    private readonly CancellationTokenSource _cts;
    private readonly HttpClient _httpClient;
    private readonly string _botToken;
    private readonly string _chatId;

    /// <summary>
    /// Logging flow that sends messages to a Telegram chat via a bot, using the
    /// Bot API sendMessage endpoint. Events are queued in a bounded channel
    /// (oldest dropped when full) and posted by a background worker.
    /// </summary>
    /// <param name="botToken">Telegram bot API token.</param>
    /// <param name="chatId">Target chat identifier.</param>
    /// <param name="minimumLevel">Minimum log level this flow accepts.</param>
    public TelegramFlow(
        string botToken,
        string chatId,
        LogLevel minimumLevel = LogLevel.Information)
        : base("Telegram", minimumLevel)
    {
        _botToken = botToken ?? throw new ArgumentNullException(nameof(botToken));
        _chatId = chatId ?? throw new ArgumentNullException(nameof(chatId));
        _httpClient = new HttpClient();
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            FullMode = BoundedChannelFullMode.DropOldest,
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _workerTask = Task.Run(() => ProcessQueueAsync(_cts.Token));
    }

    /// <summary>
    /// Enqueues one event for asynchronous delivery. Never blocks; with DropOldest
    /// TryWrite effectively always succeeds, but a failed write is counted as dropped.
    /// </summary>
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>
    /// Background worker: drains the channel in small batches and posts each event.
    /// Exits when the channel is completed or the token is cancelled.
    /// </summary>
    private async Task ProcessQueueAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        try
        {
            while (await _channel.Reader.WaitToReadAsync(cancellationToken))
            {
                while (_channel.Reader.TryRead(out var logEvent))
                {
                    batch.Add(logEvent);
                    if (batch.Count >= DefaultBatchSize)
                    {
                        await SendBatchAsync(batch, cancellationToken);
                        batch.Clear();
                    }
                }
                // Queue momentarily empty: flush the partial batch for low latency.
                if (batch.Count > 0)
                {
                    await SendBatchAsync(batch, cancellationToken);
                    batch.Clear();
                }
            }
            // Channel completed: flush whatever is left.
            if (batch.Count > 0)
            {
                await SendBatchAsync(batch, cancellationToken);
            }
        }
        catch (OperationCanceledException) { }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"TelegramFlow error: {ex.Message}");
        }
    }

    /// <summary>
    /// Posts each event in the batch as an individual sendMessage call.
    /// Delivery is best-effort: HTTP status codes are not inspected.
    /// </summary>
    private async Task SendBatchAsync(List<LogEvent> batch, CancellationToken cancellationToken)
    {
        foreach (var logEvent in batch)
        {
            var message = BuildMessage(logEvent);
            var url = $"https://api.telegram.org/bot{_botToken}/sendMessage";
            var payload = new
            {
                chat_id = _chatId,
                text = message,
                parse_mode = "Markdown"
            };
            var json = JsonHelper.ToJson(payload);
            using var content = new StringContent(json, Encoding.UTF8, "application/json");
            // Dispose the response so the underlying connection is released
            // (the original leaked every HttpResponseMessage).
            using var response = await _httpClient.PostAsync(url, content, cancellationToken);
        }
    }

    /// <summary>
    /// Builds the Markdown-formatted Telegram text for one event.
    /// NOTE(review): message/property text is not Markdown-escaped; content with
    /// '*', '_' or '`' may render oddly or be rejected by the API — confirm intended.
    /// </summary>
    private static string BuildMessage(LogEvent logEvent)
    {
        var sb = new StringBuilder();
        sb.Append($"*{logEvent.Level}* | {logEvent.Category}\n");
        sb.Append($"`{LogEvent.GetDateTime(logEvent.Timestamp):yyyy-MM-dd HH:mm:ss.fff}`\n");
        sb.Append(logEvent.Message);
        if (logEvent.Exception != null)
        {
            sb.Append($"\n*Exception:* `{logEvent.Exception.GetType().FullName}`\n");
            sb.Append($"`{logEvent.Exception.Message}`\n");
        }
        if (logEvent.Properties.Count > 0)
        {
            sb.Append("\n*Properties:*");
            foreach (var prop in logEvent.Properties)
            {
                sb.Append($"\n`{prop.Key}` = `{prop.Value?.ToString() ?? "null"}`");
            }
        }
        return sb.ToString();
    }

    /// <summary>
    /// Completes the channel and waits for the worker to drain. This permanently
    /// stops intake (original behavior). TryComplete is idempotent, so Flush
    /// followed by Dispose no longer throws.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        _channel.Writer.TryComplete();
        try
        {
            await _workerTask.ConfigureAwait(false);
        }
        catch { }
    }

    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        _channel.Writer.TryComplete();
        _cts.Cancel();
        try
        {
            await _workerTask.ConfigureAwait(false);
        }
        catch { }
        _httpClient.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,303 @@
using EonaCat.LogStack.Core;
using EonaCat.LogStack.EonaCatLogStackCore;
using Microsoft.Extensions.Primitives;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// A decorator flow that applies per-level rate limiting (token bucket) to any
/// inner flow. Prevents log storms from overwhelming downstream sinks (e.g. Slack,
/// HTTP, email) while ensuring that at least one event of each pattern gets through.
///
/// Also supports deduplication: identical messages within a window are collapsed
/// into a single entry with a repeat-count.
/// </summary>
public sealed class ThrottledFlow : FlowBase
{
    // Per-level token bucket. Not thread-safe on its own; all access happens
    // under the outer _lock.
    private sealed class Bucket
    {
        public double Tokens;
        public DateTime LastRefill;
        public readonly double Capacity;
        public readonly double RefillPerSecond;
        public Bucket(double capacity, double refillPerSecond)
        {
            Capacity = capacity;
            RefillPerSecond = refillPerSecond;
            Tokens = capacity; // start full so the first burst passes through
            LastRefill = DateTime.UtcNow;
        }
        /// <summary>
        /// Refills tokens based on wall-clock elapsed time, then returns true and
        /// consumes one token if at least one is available.
        /// </summary>
        public bool TryConsume()
        {
            DateTime now = DateTime.UtcNow;
            double elapsed = (now - LastRefill).TotalSeconds;
            Tokens = Math.Min(Capacity, Tokens + elapsed * RefillPerSecond);
            LastRefill = now;
            if (Tokens >= 1.0) { Tokens -= 1.0; return true; }
            return false;
        }
    }
    // Tracking record for one deduplicated message key.
    private sealed class DedupEntry
    {
        public int Count;          // occurrences seen within the window (first one included)
        public DateTime FirstSeen; // window start for this key
        public LogEvent LastEvent; // most recent occurrence, used for the summary event
    }
    private readonly IFlow _inner;
    private readonly int _burstCapacity;
    private readonly double _refillPerSecond;
    private readonly bool _deduplicate;
    private readonly TimeSpan _dedupWindow;
    private readonly int _dedupMaxKeys;
    private readonly Dictionary<LogLevel, Bucket> _buckets
        = new Dictionary<LogLevel, Bucket>();
    private readonly Dictionary<string, DedupEntry> _dedupMap
        = new Dictionary<string, DedupEntry>(StringComparer.Ordinal);
    private readonly object _lock = new object(); // guards _buckets and _dedupMap
    private long _throttledCount;
    /// <param name="inner">The downstream flow to protect.</param>
    /// <param name="burstCapacity">
    /// Max events that can be emitted in a burst per level (token bucket capacity).
    /// </param>
    /// <param name="refillPerSecond">
    /// How many tokens are added per second per level. E.g. 5.0 = 5 events/second steady state.
    /// </param>
    /// <param name="deduplicate">
    /// If true, identical messages within <paramref name="dedupWindow"/> are collapsed.
    /// The suppressed count is appended to the message when the window expires.
    /// </param>
    /// <param name="dedupWindow">Deduplication window (default 60 s).</param>
    /// <param name="dedupMaxKeys">Maximum number of distinct messages tracked (default 1000).</param>
    /// <param name="minimumLevel">Minimum level this flow processes.</param>
    public ThrottledFlow(
        IFlow inner,
        int burstCapacity = 10,
        double refillPerSecond = 1.0,
        bool deduplicate = false,
        TimeSpan dedupWindow = default(TimeSpan),
        int dedupMaxKeys = 1000,
        LogLevel minimumLevel = LogLevel.Trace)
        : base("Throttled:" + (inner != null ? inner.GetType().Name : "null"), minimumLevel)
    {
        if (inner == null)
        {
            throw new ArgumentNullException("inner");
        }
        _inner = inner;
        // Clamp invalid arguments to sane minima instead of throwing.
        _burstCapacity = burstCapacity < 1 ? 1 : burstCapacity;
        _refillPerSecond = refillPerSecond <= 0 ? 1.0 : refillPerSecond;
        _deduplicate = deduplicate;
        _dedupWindow = dedupWindow == default(TimeSpan) ? TimeSpan.FromSeconds(60) : dedupWindow;
        _dedupMaxKeys = dedupMaxKeys < 1 ? 1 : dedupMaxKeys;
        // Pre-create buckets for all defined levels
        foreach (LogLevel level in Enum.GetValues(typeof(LogLevel)))
        {
            _buckets[level] = new Bucket(_burstCapacity, _refillPerSecond);
        }
    }
    /// <summary>Events throttled (dropped by rate limit or dedup) so far.</summary>
    public long ThrottledCount { get { return Interlocked.Read(ref _throttledCount); } }
    /// <summary>
    /// Applies dedup (if enabled) and then the per-level token bucket; only events
    /// that pass both are forwarded to the inner flow. Order matters: a first
    /// occurrence registers in the dedup map even if the bucket then drops it.
    /// </summary>
    public override async Task<WriteResult> BlastAsync(
        LogEvent logEvent,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return WriteResult.LevelFiltered;
        }
        lock (_lock)
        {
            // deduplication pass
            if (_deduplicate)
            {
                string key = MakeDedupKey(logEvent);
                DedupEntry entry;
                // Flush expired entries to avoid unbounded growth
                if (_dedupMap.Count >= _dedupMaxKeys)
                {
                    PurgeExpiredDedupEntries();
                }
                if (_dedupMap.TryGetValue(key, out entry))
                {
                    TimeSpan age = DateTime.UtcNow - entry.FirstSeen;
                    if (age < _dedupWindow)
                    {
                        // Repeat within the window: suppress, remember the latest event.
                        entry.Count++;
                        entry.LastEvent = logEvent;
                        Interlocked.Increment(ref _throttledCount);
                        return WriteResult.Dropped;
                    }
                    else
                    {
                        // Window expired: flush the suppressed count as a synthetic event
                        if (entry.Count > 1)
                        {
                            FlushDedupEntry(key, entry);
                        }
                        _dedupMap.Remove(key);
                    }
                }
                // First occurrence
                _dedupMap[key] = new DedupEntry
                {
                    Count = 1,
                    FirstSeen = DateTime.UtcNow,
                    LastEvent = logEvent
                };
            }
            // token bucket pass
            Bucket bucket;
            if (!_buckets.TryGetValue(logEvent.Level, out bucket))
            {
                // Defensive: levels outside the pre-created set get a bucket on demand.
                bucket = new Bucket(_burstCapacity, _refillPerSecond);
                _buckets[logEvent.Level] = bucket;
            }
            if (!bucket.TryConsume())
            {
                Interlocked.Increment(ref _throttledCount);
                Interlocked.Increment(ref DroppedCount);
                return WriteResult.Dropped;
            }
        }
        // Forward outside the lock so the inner flow's await doesn't block other writers.
        WriteResult result = await _inner.BlastAsync(logEvent, cancellationToken).ConfigureAwait(false);
        Interlocked.Increment(ref BlastedCount);
        return result;
    }
    /// <summary>
    /// Applies BlastAsync per event so each one goes through throttling/dedup.
    /// Returns Dropped if any event in the batch was dropped.
    /// </summary>
    public override async Task<WriteResult> BlastBatchAsync(
        ReadOnlyMemory<LogEvent> logEvents,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        if (!IsEnabled)
        {
            return WriteResult.FlowDisabled;
        }
        WriteResult result = WriteResult.Success;
        // ToArray: a ReadOnlySpan cannot live across the awaits below.
        foreach (LogEvent e in logEvents.ToArray())
        {
            WriteResult r = await BlastAsync(e, cancellationToken).ConfigureAwait(false);
            if (r == WriteResult.Dropped)
            {
                result = WriteResult.Dropped;
            }
        }
        return result;
    }
    /// <summary>
    /// Emits summaries for all pending dedup entries, then flushes the inner flow.
    /// </summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        // Flush all pending dedup entries
        lock (_lock)
        {
            // Copy keys first: FlushDedupEntry/Remove mutate the map while we iterate.
            List<string> keys = new List<string>(_dedupMap.Keys);
            foreach (string key in keys)
            {
                DedupEntry entry;
                if (_dedupMap.TryGetValue(key, out entry) && entry.Count > 1)
                {
                    FlushDedupEntry(key, entry);
                }
                _dedupMap.Remove(key);
            }
        }
        return _inner.FlushAsync(cancellationToken);
    }
    // NOTE(review): disposes this decorator (after flushing) but does NOT dispose
    // _inner — confirm ownership of the inner flow lies with the caller.
    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        await FlushAsync().ConfigureAwait(false);
        await base.DisposeAsync().ConfigureAwait(false);
    }
    private static string MakeDedupKey(LogEvent log)
    {
        // Key = level + category + first 200 chars of message (ignore dynamic parts like timestamps)
        string msg = log.Message.Length > 0 ? log.Message.ToString() : string.Empty;
        if (msg.Length > 200)
        {
            msg = msg.Substring(0, 200);
        }
        return log.Level + "|" + (log.Category ?? string.Empty) + "|" + msg;
    }
    // Emits a synthetic "[repeated N more times]" event for a suppressed key.
    // NOTE(review): called while holding _lock and blocks on the inner flow's
    // async BlastAsync via GetResult() — a sync-over-async hazard if the inner
    // flow ever resumes on a captured context; confirm acceptable here.
    private void FlushDedupEntry(string key, DedupEntry entry)
    {
        // Build a synthetic event that summarises the suppressed repeats
        string original = entry.LastEvent.Message.Length > 0
            ? entry.LastEvent.Message.ToString()
            : string.Empty;
        string summary = original + " [repeated " + (entry.Count - 1) + " more times in "
            + (int)_dedupWindow.TotalSeconds + "s window]";
        LogEvent synth = new LogEvent
        {
            Level = entry.LastEvent.Level,
            Category = entry.LastEvent.Category,
            Timestamp = entry.LastEvent.Timestamp,
            Message = new StringSegment(summary),
            Exception = entry.LastEvent.Exception
        };
        try { _inner.BlastAsync(synth).GetAwaiter().GetResult(); }
        catch { /* best-effort */ }
    }
    // Removes entries whose window has passed, summarising any with repeats.
    // Caller must hold _lock.
    private void PurgeExpiredDedupEntries()
    {
        List<string> expired = new List<string>();
        DateTime cutoff = DateTime.UtcNow - _dedupWindow;
        foreach (KeyValuePair<string, DedupEntry> kv in _dedupMap)
        {
            if (kv.Value.FirstSeen < cutoff)
            {
                expired.Add(kv.Key);
            }
        }
        foreach (string k in expired)
        {
            DedupEntry entry;
            if (_dedupMap.TryGetValue(k, out entry) && entry.Count > 1)
            {
                FlushDedupEntry(k, entry);
            }
            _dedupMap.Remove(k);
        }
    }
}
}

View File

@@ -0,0 +1,204 @@
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Sockets;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public sealed class UdpFlow : FlowBase
{
private const int DefaultBatchSize = 256;
private const int ChannelCapacity = 4096;
private readonly Channel<LogEvent> _channel;
private readonly Task _senderTask;
private readonly CancellationTokenSource _cts;
private readonly string _host;
private readonly int _port;
private readonly UdpClient _udpClient;
private readonly BackpressureStrategy _backpressureStrategy;
private readonly TimeSpan _flushInterval;
private readonly Task _flushTask;
/// <summary>
/// Flow that sends formatted log lines to a UDP endpoint. Events are queued in a
/// bounded channel and written in batches by a background sender task; an
/// optional periodic flush task runs when a positive interval is configured.
/// </summary>
/// <param name="host">Remote host name or address.</param>
/// <param name="port">Remote port.</param>
/// <param name="flushIntervalInMilliseconds">Periodic flush interval; &lt;= 0 disables the flush task.</param>
/// <param name="minimumLevel">Minimum log level this flow accepts.</param>
/// <param name="backpressureStrategy">What to do when the internal queue is full.</param>
public UdpFlow(
    string host,
    int port,
    int flushIntervalInMilliseconds = 2000,
    LogLevel minimumLevel = LogLevel.Trace,
    BackpressureStrategy backpressureStrategy = BackpressureStrategy.DropOldest)
    : base($"UDP:{host}:{port}", minimumLevel)
{
    _host = host ?? throw new ArgumentNullException(nameof(host));
    _port = port;
    _backpressureStrategy = backpressureStrategy;
    _flushInterval = TimeSpan.FromMilliseconds(flushIntervalInMilliseconds);
    _udpClient = new UdpClient();
    var channelOptions = new BoundedChannelOptions(ChannelCapacity)
    {
        // Map the flow-level strategy onto the channel's full mode.
        FullMode = backpressureStrategy switch
        {
            BackpressureStrategy.Wait => BoundedChannelFullMode.Wait,
            BackpressureStrategy.DropNewest => BoundedChannelFullMode.DropWrite,
            BackpressureStrategy.DropOldest => BoundedChannelFullMode.DropOldest,
            _ => BoundedChannelFullMode.Wait
        },
        SingleReader = true,
        SingleWriter = false
    };
    _channel = Channel.CreateBounded<LogEvent>(channelOptions);
    _cts = new CancellationTokenSource();
    _senderTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
    if (flushIntervalInMilliseconds > 0)
    {
        // NOTE(review): _flushTask stays null when the interval is <= 0 —
        // confirm any awaiter of _flushTask handles the null case.
        _flushTask = Task.Run(() => PeriodicFlushAsync(_cts.Token));
    }
}
/// <summary>
/// Enqueues one event for asynchronous delivery. Never blocks: a full channel
/// (in drop modes) means the event is counted as dropped.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
{
    // Fast-path rejection for a disabled flow or a filtered level.
    if (!IsEnabled || !IsLogLevelEnabled(logEvent))
    {
        return Task.FromResult(WriteResult.LevelFiltered);
    }

    // Non-blocking enqueue; a refused write is accounted as a drop.
    var accepted = _channel.Writer.TryWrite(logEvent);
    if (!accepted)
    {
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    Interlocked.Increment(ref BlastedCount);
    return Task.FromResult(WriteResult.Success);
}
/// <summary>
/// Enqueues a batch of events. Returns Dropped when at least one event
/// could not be queued; filtered events do not affect the result.
/// </summary>
public override async Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
{
    if (!IsEnabled)
    {
        return WriteResult.FlowDisabled;
    }
    var outcome = WriteResult.Success;
    for (var i = 0; i < logEvents.Length; i++)
    {
        var current = logEvents.Span[i];
        if (!IsLogLevelEnabled(current))
        {
            continue;
        }
        if (_channel.Writer.TryWrite(current))
        {
            Interlocked.Increment(ref BlastedCount);
        }
        else
        {
            Interlocked.Increment(ref DroppedCount);
            outcome = WriteResult.Dropped;
        }
    }
    return outcome;
}
/// <summary>
/// Sender loop: drains the channel, batching up to DefaultBatchSize events
/// (or whatever is immediately available) into one datagram per send.
/// </summary>
private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
{
    var batch = new List<LogEvent>(DefaultBatchSize);
    var sb = new StringBuilder(8192);
    try
    {
        await foreach (var logEvent in _channel.Reader.ReadAllAsync(cancellationToken))
        {
            batch.Add(logEvent);
            // Send when the batch is full or the queue is momentarily empty
            // (keeps latency low under light load).
            if (batch.Count >= DefaultBatchSize || _channel.Reader.Count == 0)
            {
                await SendBatchAsync(batch, sb, cancellationToken);
                batch.Clear();
                sb.Clear();
            }
        }
    }
    catch (OperationCanceledException) { }
    catch (Exception ex)
    {
        Console.Error.WriteLine($"UdpFlow error: {ex.Message}");
    }
    // FIX: flush the remaining partial batch even when the loop ended via
    // cancellation; previously the post-loop send was skipped on OCE and the
    // final events were silently dropped.
    if (batch.Count > 0)
    {
        try
        {
            await SendBatchAsync(batch, sb, CancellationToken.None);
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"UdpFlow error: {ex.Message}");
        }
    }
}
/// <summary>
/// Formats each event onto <paramref name="sb"/> (one line per event) and
/// sends the whole batch as a single UTF-8 datagram.
/// </summary>
private async Task SendBatchAsync(List<LogEvent> batch, StringBuilder sb, CancellationToken cancellationToken)
{
    // FIX: guard against sending a zero-length datagram for an empty batch.
    if (batch.Count == 0)
    {
        return;
    }
    foreach (var logEvent in batch)
    {
        FormatLogEvent(logEvent, sb);
        sb.AppendLine();
    }
    var data = Encoding.UTF8.GetBytes(sb.ToString());
    await _udpClient.SendAsync(data, data.Length, _host, _port);
}
/// <summary>
/// Appends one event as "yyyy-MM-dd HH:mm:ss.fff [LEVEL] Category: Message".
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void FormatLogEvent(LogEvent logEvent, StringBuilder sb)
{
    var timestamp = LogEvent.GetDateTime(logEvent.Timestamp);
    sb.Append(timestamp.ToString("yyyy-MM-dd HH:mm:ss.fff"))
      .Append(" [")
      .Append(logEvent.Level.ToString().ToUpperInvariant())
      .Append("] ");
    var category = logEvent.Category;
    if (!string.IsNullOrEmpty(category))
    {
        sb.Append(category).Append(": ");
    }
    sb.Append(logEvent.Message);
}
/// <summary>
/// Timer loop paced by _flushInterval. The sender loop already drains the
/// channel eagerly, so no extra work is done per tick.
/// </summary>
private async Task PeriodicFlushAsync(CancellationToken token)
{
    try
    {
        while (!token.IsCancellationRequested)
        {
            await Task.Delay(_flushInterval, token).ConfigureAwait(false);
        }
    }
    catch (OperationCanceledException)
    {
        // FIX: Task.Delay throws on cancellation; swallow it so this
        // fire-and-forget task does not fault with an unobserved exception.
    }
}
/// <summary>
/// Waits (best-effort) until the background sender has drained everything
/// currently queued. Honors <paramref name="cancellationToken"/>.
/// </summary>
public override async Task FlushAsync(CancellationToken cancellationToken = default)
{
    // FIX: this previously completed the channel writer, which permanently
    // disabled the flow after the first flush and made the later Complete()
    // in DisposeAsync throw. Drain instead of completing.
    try
    {
        while (_channel.Reader.Count > 0)
        {
            cancellationToken.ThrowIfCancellationRequested();
            await Task.Delay(10, cancellationToken).ConfigureAwait(false);
        }
    }
    catch (OperationCanceledException)
    {
        // Best-effort: stop waiting when the caller cancels.
    }
}
/// <summary>
/// Stops accepting events, lets the sender drain, then releases resources.
/// Safe to call after FlushAsync or a second time.
/// </summary>
public override async ValueTask DisposeAsync()
{
    IsEnabled = false;
    // FIX: TryComplete instead of Complete — Complete throws when the writer
    // was already completed (e.g. by a prior flush/dispose).
    _channel.Writer.TryComplete();
    _cts.Cancel();
    try { await _senderTask.ConfigureAwait(false); } catch { }
    // FIX: also await the flush timer (previously never observed); guard for
    // the case where the constructor left it unassigned.
    if (_flushTask != null)
    {
        try { await _flushTask.ConfigureAwait(false); } catch { }
    }
    _udpClient.Dispose();
    _cts.Dispose();
    await base.DisposeAsync();
}
}

View File

@@ -0,0 +1,85 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Flow that POSTs each log event as a JSON payload to a webhook URL,
/// retrying failed deliveries up to a configurable number of attempts.
/// </summary>
public class WebhookFlow : FlowBase
{
    private readonly string _webhookUrl;      // POST target
    private readonly HttpClient _httpClient;  // one client per flow, reused for all sends
    private readonly int _maxRetries;         // total attempts (not additional retries)
    private readonly TimeSpan _retryDelay;    // pause between attempts

    /// <param name="webhookUrl">Destination URL; must not be null.</param>
    /// <param name="minimumLevel">Minimum level this flow processes.</param>
    /// <param name="maxRetries">Total number of send attempts per event.</param>
    /// <param name="retryDelay">Delay between attempts; defaults to one second.</param>
    public WebhookFlow(string webhookUrl, LogLevel minimumLevel = LogLevel.Trace, int maxRetries = 3, TimeSpan? retryDelay = null) : base($"Webhook:{webhookUrl}", minimumLevel)
    {
        _webhookUrl = webhookUrl ?? throw new ArgumentNullException(nameof(webhookUrl));
        _httpClient = new HttpClient();
        _maxRetries = maxRetries;
        _retryDelay = retryDelay ?? TimeSpan.FromSeconds(1);
    }

    public override async Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        // FIX: report FlowDisabled for a disabled flow (consistent with the
        // other flows) instead of conflating it with level filtering.
        if (!IsEnabled)
        {
            return WriteResult.FlowDisabled;
        }
        if (!IsLogLevelEnabled(logEvent))
        {
            return WriteResult.LevelFiltered;
        }
        var logPayload = new
        {
            Timestamp = LogEvent.GetDateTime(logEvent.Timestamp),
            Level = logEvent.Level.ToString(),
            Message = logEvent.Message,
            Category = logEvent.Category,
            LogEvent = logEvent
        };
        var jsonPayload = JsonHelper.ToJson(logPayload);
        for (int attempt = 0; attempt < _maxRetries; attempt++)
        {
            // FIX: build a fresh StringContent per attempt — HttpContent cannot
            // be reused after a send (HttpClient disposes/consumes it).
            using var content = new StringContent(jsonPayload, Encoding.UTF8, "application/json");
            try
            {
                var response = await _httpClient.PostAsync(_webhookUrl, content, cancellationToken);
                if (response.IsSuccessStatusCode)
                {
                    return WriteResult.Success;
                }
            }
            catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
            {
                // FIX: honor caller cancellation instead of retrying through it.
                throw;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine($"WebhookFlow error: {ex.Message}");
            }
            // FIX: do not sleep after the final attempt.
            if (attempt < _maxRetries - 1)
            {
                await Task.Delay(_retryDelay, cancellationToken);
            }
        }
        return WriteResult.Dropped;
    }

    public override async ValueTask DisposeAsync()
    {
        _httpClient.Dispose();
        await base.DisposeAsync();
    }

    /// <summary>No buffering: every event is sent synchronously in BlastAsync.</summary>
    public override Task FlushAsync(CancellationToken cancellationToken = default)
    {
        return Task.CompletedTask;
    }
}
}

View File

@@ -0,0 +1,233 @@
using EonaCat.Json;
using EonaCat.LogStack.Core;
using System;
using System.Collections.Generic;
using System.Net.Sockets;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Flow that forwards log events to a Zabbix server/proxy over TCP using the
/// Zabbix sender protocol ("ZBXD\1" header + JSON body). Events are queued in
/// a bounded channel and drained by a background sender task.
/// </summary>
public sealed class ZabbixFlow : FlowBase
{
    private const int DefaultBatchSize = 256;  // events drained per send cycle
    private const int ChannelCapacity = 4096;  // queue bound before backpressure
    private readonly Channel<LogEvent> _channel;
    private readonly Task _senderTask;
    private readonly CancellationTokenSource _cts;
    private readonly string _host;
    private readonly int _port;
    private TcpClient? _tcpClient;             // lazily (re)created by the sender loop
    private NetworkStream? _stream;
    private readonly BackpressureStrategy _backpressureStrategy;
    private readonly string _zabbixHostName;   // "host" field reported to Zabbix
    private readonly string _zabbixKey;        // item key reported to Zabbix

    /// <param name="host">Zabbix server/proxy host; must not be null.</param>
    /// <param name="port">Zabbix trapper port (default 10051).</param>
    /// <param name="zabbixHostName">Host name to report; defaults to the machine name.</param>
    /// <param name="zabbixKey">Item key to report; defaults to "log_event".</param>
    /// <param name="minimumLevel">Minimum level this flow processes.</param>
    /// <param name="backpressureStrategy">Behavior when the internal queue is full.</param>
    public ZabbixFlow(
        string host,
        int port = 10051,
        string zabbixHostName = null,
        string zabbixKey = "log_event",
        LogLevel minimumLevel = LogLevel.Trace,
        BackpressureStrategy backpressureStrategy = BackpressureStrategy.DropOldest)
        : base($"Zabbix:{host}:{port}", minimumLevel)
    {
        _host = host ?? throw new ArgumentNullException(nameof(host));
        _port = port;
        _backpressureStrategy = backpressureStrategy;
        _zabbixHostName = zabbixHostName ?? Environment.MachineName;
        _zabbixKey = zabbixKey ?? "log_event";
        var channelOptions = new BoundedChannelOptions(ChannelCapacity)
        {
            FullMode = backpressureStrategy switch
            {
                BackpressureStrategy.Wait => BoundedChannelFullMode.Wait,
                BackpressureStrategy.DropNewest => BoundedChannelFullMode.DropWrite,
                BackpressureStrategy.DropOldest => BoundedChannelFullMode.DropOldest,
                _ => BoundedChannelFullMode.Wait
            },
            SingleReader = true,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<LogEvent>(channelOptions);
        _cts = new CancellationTokenSource();
        _senderTask = Task.Run(() => ProcessLogEventsAsync(_cts.Token));
    }

    /// <summary>Enqueues a single event; never blocks the caller.</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled || !IsLogLevelEnabled(logEvent))
        {
            return Task.FromResult(WriteResult.LevelFiltered);
        }
        if (_channel.Writer.TryWrite(logEvent))
        {
            Interlocked.Increment(ref BlastedCount);
            return Task.FromResult(WriteResult.Success);
        }
        Interlocked.Increment(ref DroppedCount);
        return Task.FromResult(WriteResult.Dropped);
    }

    /// <summary>Enqueues a batch; Dropped when any event could not be queued.</summary>
    public override async Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
    {
        if (!IsEnabled)
        {
            return WriteResult.FlowDisabled;
        }
        var result = WriteResult.Success;
        for (var i = 0; i < logEvents.Length; i++)
        {
            var current = logEvents.Span[i];
            if (!IsLogLevelEnabled(current))
            {
                continue;
            }
            if (_channel.Writer.TryWrite(current))
            {
                Interlocked.Increment(ref BlastedCount);
            }
            else
            {
                Interlocked.Increment(ref DroppedCount);
                result = WriteResult.Dropped;
            }
        }
        return result;
    }

    /// <summary>
    /// Sender loop: connects (and reconnects after failures), drains the channel
    /// in batches, and exits when the channel completes or cancellation fires.
    /// </summary>
    private async Task ProcessLogEventsAsync(CancellationToken cancellationToken)
    {
        var batch = new List<LogEvent>(DefaultBatchSize);
        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                await EnsureConnectedAsync(cancellationToken);
                await foreach (var logEvent in _channel.Reader.ReadAllAsync(cancellationToken))
                {
                    batch.Add(logEvent);
                    if (batch.Count >= DefaultBatchSize || _channel.Reader.Count == 0)
                    {
                        await SendBatchAsync(batch, cancellationToken);
                        batch.Clear();
                    }
                }
                // FIX: channel completed normally — push out any partial batch
                // (previously lost when an earlier error restarted the loop).
                if (batch.Count > 0)
                {
                    await SendBatchAsync(batch, CancellationToken.None);
                    batch.Clear();
                }
                return;
            }
            catch (OperationCanceledException)
            {
                // FIX: exit cleanly on cancellation instead of letting the OCE
                // fault this fire-and-forget task.
                return;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine($"ZabbixFlow error: {ex.Message}");
                // FIX: clear the stale stream alongside the client so a
                // reconnect never writes to a disposed stream.
                _tcpClient?.Dispose();
                _tcpClient = null;
                _stream = null;
                try
                {
                    await Task.Delay(1000, cancellationToken);
                }
                catch (OperationCanceledException)
                {
                    return;
                }
            }
        }
    }

    /// <summary>Opens the TCP connection when absent or no longer connected.</summary>
    private async Task EnsureConnectedAsync(CancellationToken cancellationToken)
    {
        if (_tcpClient != null && _tcpClient.Connected)
        {
            return;
        }
        _tcpClient?.Dispose();
        _tcpClient = new TcpClient();
        await _tcpClient.ConnectAsync(_host, _port);
        _stream = _tcpClient.GetStream();
    }

    /// <summary>
    /// Sends each event as one Zabbix "sender data" request: 13-byte header
    /// ("ZBXD", flags=1, little-endian 64-bit payload length) + JSON body.
    /// </summary>
    private async Task SendBatchAsync(List<LogEvent> batch, CancellationToken cancellationToken)
    {
        if (_stream == null || batch.Count == 0)
        {
            return;
        }
        foreach (var logEvent in batch)
        {
            var payload = new
            {
                request = "sender data",
                data = new[]
                {
                    new {
                        host = _zabbixHostName,
                        key = _zabbixKey,
                        value = FormatLogEvent(logEvent)
                    }
                }
            };
            string json = JsonHelper.ToJson(payload);
            byte[] jsonBytes = Encoding.UTF8.GetBytes(json);
            // Zabbix protocol header
            byte[] header = new byte[13]; // "ZBXD\1" + 8 bytes length
            header[0] = (byte)'Z';
            header[1] = (byte)'B';
            header[2] = (byte)'X';
            header[3] = (byte)'D';
            header[4] = 1;
            long length = jsonBytes.Length;
            // Little-endian length, per the Zabbix sender protocol.
            for (int i = 0; i < 8; i++)
            {
                header[5 + i] = (byte)(length >> (8 * i) & 0xFF);
            }
            await _stream.WriteAsync(header, 0, header.Length, cancellationToken);
            await _stream.WriteAsync(jsonBytes, 0, jsonBytes.Length, cancellationToken);
            await _stream.FlushAsync(cancellationToken);
        }
    }

    /// <summary>Formats one event as "timestamp [Level] Category: Message".</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private string FormatLogEvent(LogEvent logEvent)
    {
        var dt = LogEvent.GetDateTime(logEvent.Timestamp);
        string ts = dt.ToString("yyyy-MM-dd HH:mm:ss.fff");
        string category = string.IsNullOrEmpty(logEvent.Category) ? "ZabbixFlow" : logEvent.Category;
        return $"{ts} [{logEvent.Level}] {category}: {logEvent.Message}";
    }

    /// <summary>
    /// Waits (best-effort) until the queue is drained by the sender task.
    /// </summary>
    public override async Task FlushAsync(CancellationToken cancellationToken = default)
    {
        // FIX: previously completed the writer, permanently disabling the flow
        // after the first flush and breaking a later DisposeAsync.
        try
        {
            while (_channel.Reader.Count > 0)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await Task.Delay(10, cancellationToken).ConfigureAwait(false);
            }
        }
        catch (OperationCanceledException)
        {
            // Best-effort flush: stop waiting when the caller cancels.
        }
    }

    public override async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        // FIX: TryComplete — Complete throws when the writer was already completed.
        _channel.Writer.TryComplete();
        _cts.Cancel();
        try { await _senderTask.ConfigureAwait(false); } catch { }
        _stream?.Dispose();
        _tcpClient?.Dispose();
        _cts.Dispose();
        await base.DisposeAsync();
    }
}
}

View File

@@ -0,0 +1,26 @@
using EonaCat.LogStack.Core;
namespace EonaCat.LogStack.Boosters;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Boosters enrich log events with additional context or transform them before they reach flows.
/// Boosters are designed for zero-allocation where possible.
/// </summary>
public interface IBooster
{
    /// <summary>
    /// Gets the name of this booster for identification
    /// </summary>
    string Name { get; }
    /// <summary>
    /// Boost a log event with additional data or transforms it.
    /// Return false to filter out the event entirely.
    /// </summary>
    /// <param name="builder">Builder to modify the log event (passed by reference to avoid copying the struct)</param>
    /// <returns>True to continue processing, false to filter out the event</returns>
    bool Boost(ref LogEventBuilder builder);
}

View File

@@ -0,0 +1,126 @@
using EonaCat.LogStack.Core;
using System;
using System.Threading;
using System.Threading.Tasks;
namespace EonaCat.LogStack.Flows;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Flows are output destinations for log events (replacement for "sinks").
/// Each flow handles writing log events to a specific destination with optimized batching.
/// </summary>
public interface IFlow : IAsyncDisposable
{
    /// <summary>
    /// Gets the name of this flow for identification
    /// </summary>
    string Name { get; }
    /// <summary>
    /// Minimum log level this flow will process
    /// </summary>
    LogLevel MinimumLevel { get; }
    /// <summary>
    /// Whether this flow is currently enabled
    /// </summary>
    bool IsEnabled { get; }
    /// <summary>
    /// Blast a single log event to this flow
    /// </summary>
    /// <returns>A <see cref="WriteResult"/> describing whether the event was accepted, filtered, or dropped</returns>
    Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default);
    /// <summary>
    /// Blast a batch of log events to this flow (more efficient than single blasts)
    /// </summary>
    /// <returns>Success when every non-filtered event was accepted; otherwise the failing result</returns>
    Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default);
    /// <summary>
    /// Flush any buffered log events immediately
    /// </summary>
    Task FlushAsync(CancellationToken cancellationToken = default);
}
/// <summary>
/// Base class for flows with common functionality: name/level/enabled state,
/// blast/drop counters, a default batch implementation, and diagnostics.
/// </summary>
public abstract class FlowBase : IFlow
{
    protected FlowBase(string name, LogLevel minimumLevel = LogLevel.Trace)
    {
        Name = name ?? throw new ArgumentNullException(nameof(name));
        MinimumLevel = minimumLevel;
        IsEnabled = true;
    }

    public string Name { get; }
    public LogLevel MinimumLevel { get; protected set; }
    public bool IsEnabled { get; protected set; }

    // Counters updated via Interlocked by derived flows; read in GetDiagnostics.
    protected long DroppedCount;
    protected long BlastedCount;

    /// <summary>True when the event's level meets this flow's minimum level.</summary>
    protected bool IsLogLevelEnabled(LogEvent logEvent)
    {
        return logEvent.Level >= MinimumLevel;
    }

    public abstract Task<WriteResult> BlastAsync(LogEvent logEvent, CancellationToken cancellationToken = default);

    /// <summary>
    /// Default batch implementation: forwards each event to BlastAsync.
    /// Returns the last non-Success result when any event fails.
    /// </summary>
    public virtual async Task<WriteResult> BlastBatchAsync(ReadOnlyMemory<LogEvent> logEvents, CancellationToken cancellationToken = default)
    {
        var result = WriteResult.Success;
        // FIX: index the memory directly instead of copying the whole batch
        // into a new array (the original ToArray() allocated per call).
        for (var i = 0; i < logEvents.Length; i++)
        {
            LogEvent current = logEvents.Span[i];
            var singleResult = await BlastAsync(current, cancellationToken).ConfigureAwait(false);
            if (singleResult != WriteResult.Success)
            {
                result = singleResult;
            }
        }
        return result;
    }

    public abstract Task FlushAsync(CancellationToken cancellationToken = default);

    /// <summary>Disables the flow and flushes whatever remains buffered.</summary>
    public virtual async ValueTask DisposeAsync()
    {
        IsEnabled = false;
        await FlushAsync(default).ConfigureAwait(false);
        GC.SuppressFinalize(this);
    }

    /// <summary>
    /// Gets diagnostic information about this flow
    /// </summary>
    public virtual FlowDiagnostics GetDiagnostics()
    {
        return new FlowDiagnostics
        {
            Name = Name,
            IsEnabled = IsEnabled,
            MinimumLevel = MinimumLevel,
            BlastedCount = Interlocked.Read(ref BlastedCount),
            DroppedCount = Interlocked.Read(ref DroppedCount)
        };
    }
}
/// <summary>
/// Diagnostic information about a flow (point-in-time snapshot produced by
/// <see cref="FlowBase.GetDiagnostics"/>)
/// </summary>
public sealed class FlowDiagnostics
{
    public string Name { get; set; }            // flow identifier
    public bool IsEnabled { get; set; }         // whether the flow was enabled at snapshot time
    public LogLevel MinimumLevel { get; set; }  // level threshold at snapshot time
    public long BlastedCount { get; set; }      // events successfully accepted by the flow
    public long DroppedCount { get; set; }      // events rejected (queue full / delivery failed)
}

View File

@@ -0,0 +1,169 @@
using EonaCat.LogStack.Extensions;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace EonaCat.LogStack.Core;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Represents a single log event with efficient memory management through pooling.
/// This struct is designed to minimize allocations and support high-throughput logging.
/// </summary>
public struct LogEvent
{
    public long Timestamp { get; set; }                     // DateTime ticks (see CreateTimestamp/GetDateTime)
    public LogLevel Level { get; set; }
    public string Category { get; set; }
    public ReadOnlyMemory<char> Message { get; set; }
    public Exception? Exception { get; set; }
    public Dictionary<string, object?> Properties { get; set; }  // may be null for default(LogEvent)
    public string CustomData { get; set; }
    public int ThreadId { get; set; }
    public ActivityTraceId TraceId { get; set; }
    public ActivitySpanId SpanId { get; set; }
    /// <summary>
    /// Estimated memory size in bytes for backpressure calculations
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public int EstimateSize()
    {
        // Base overhead
        int size = 64;
        // Message size (UTF-16: 2 bytes per char)
        size += Message.Length * 2;
        // Category size
        size += (Category?.Length ?? 0) * 2;
        // Exception size (estimated)
        if (Exception != null)
        {
            size += 512;
        }
        // FIX: Properties can be null on default(LogEvent) — the other members
        // already guard for that (see HasProperties), this line did not.
        size += (Properties?.Count ?? 0) * 32;
        return size;
    }
    /// <summary>
    /// Creates a timestamp value from DateTime
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static long CreateTimestamp(DateTime dateTime) => dateTime.Ticks;
    /// <summary>
    /// Converts timestamp back to DateTime
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static DateTime GetDateTime(long timestamp) => new(timestamp);
    public bool HasProperties => Properties != null && Properties.Count > 0;
    public bool HasCustomData => CustomData != null && CustomData.Length > 0;
    public bool HasException => Exception != null;
    public bool HasCategory => Category != null;
}
/// <summary>
/// Builder for creating LogEvent instances with minimal allocations.
/// All With* methods mutate the builder and return it for fluent chaining.
/// </summary>
public struct LogEventBuilder
{
    private long _timestamp;
    private LogLevel _level;
    private string? _category;
    private ReadOnlyMemory<char> _message;
    private Exception? _exception;
    private Dictionary<string, object>? _properties;  // created lazily on first WithProperty
    private int _threadId;
    private ActivityTraceId _traceId;
    private ActivitySpanId _spanId;

    /// <summary>Captures timestamp, thread id and the current Activity's trace/span ids.</summary>
    public LogEventBuilder()
    {
        _timestamp = DateTime.UtcNow.Ticks;
        _level = LogLevel.Information;
        _threadId = Environment.CurrentManagedThreadId;
        var activity = Activity.Current;
        _traceId = activity?.TraceId ?? default;
        _spanId = activity?.SpanId ?? default;
    }

    /// <summary>Category set so far (null until WithCategory is called).</summary>
    public string? Category => _category;

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public LogEventBuilder WithTimestamp(long timestamp)
    {
        _timestamp = timestamp;
        return this;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public LogEventBuilder WithLevel(LogLevel level)
    {
        _level = level;
        return this;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public LogEventBuilder WithCategory(string category)
    {
        _category = category;
        return this;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public LogEventBuilder WithMessage(ReadOnlyMemory<char> message)
    {
        _message = message;
        return this;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public LogEventBuilder WithMessage(string message)
    {
        _message = message.AsMemory();
        return this;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public LogEventBuilder WithException(Exception? exception)
    {
        _exception = exception;
        return this;
    }

    /// <summary>Adds a property; an existing key is left untouched (first write wins).</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public LogEventBuilder WithProperty(string key, object? value)
    {
        _properties ??= new Dictionary<string, object>(4);
        if (!_properties.ContainsKey(key))
        {
            _properties[key] = value;
        }
        return this;
    }

    /// <summary>Materializes the accumulated state into a LogEvent.</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public LogEvent Build()
    {
        var result = new LogEvent
        {
            Timestamp = _timestamp,
            Level = _level,
            Category = _category ?? string.Empty,
            Message = _message,
            Exception = _exception,
            Properties = _properties ?? new Dictionary<string, object>(),
            ThreadId = _threadId,
            TraceId = _traceId,
            SpanId = _spanId
        };
        return result;
    }
}

View File

@@ -0,0 +1,23 @@
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
namespace EonaCat.LogStack.EonaCatLogStackCore
{
/// <summary>Point-in-time snapshot of logging throughput counters.</summary>
public struct LogStats
{
    public long Written;           // total events written
    public long Dropped;           // total events dropped
    public long Rotations;         // file rotations performed
    public long BytesWritten;      // total payload bytes written
    public double WritesPerSecond; // recent write throughput

    public LogStats(long written, long dropped, long rotations, long bytesWritten, double writesPerSecond)
    {
        // Tuple-style assignment keeps the field/parameter pairing obvious.
        (Written, Dropped, Rotations, BytesWritten, WritesPerSecond) =
            (written, dropped, rotations, bytesWritten, writesPerSecond);
    }
}
}

View File

@@ -0,0 +1,25 @@
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
namespace EonaCat.LogStack.EonaCatLogStackCore.Policies
{
/// <summary>Combined retention policy: delete rolled files exceeding any threshold.</summary>
public sealed class FileRetentionPolicy
{
    /// <summary>Maximum number of rolled archive files to keep (0 = unlimited). Defaults to 10.</summary>
    public int MaxRolledFiles { get; set; } = 10;

    /// <summary>Maximum total size of all archives in bytes (0 = unlimited). Defaults to unlimited.</summary>
    public long MaxTotalArchiveBytes { get; set; }

    /// <summary>Maximum age of any archive file in days (0 = unlimited). Defaults to unlimited.</summary>
    public int MaxAgeDays { get; set; }
}
}

View File

@@ -0,0 +1,39 @@
using EonaCat.LogStack.Core;
using System;
using System.Runtime.CompilerServices;
using System.Threading;
namespace EonaCat.LogStack.EonaCatLogStackCore.Policies
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>Log only 1-in-N events, optionally filtered by a predicate.</summary>
public sealed class SamplingPolicy
{
    private long _counter; // shared across threads; incremented with Interlocked

    /// <summary>Keep 1 out of every <see cref="Rate"/> events. Values &lt;= 1 keep everything.</summary>
    public int Rate { get; set; }

    /// <summary>Optional predicate. Null = apply sampling to all events; events that do NOT match are always logged.</summary>
    public Func<LogEvent, bool> Predicate { get; set; }

    public SamplingPolicy()
    {
        Rate = 10;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public bool ShouldLog(LogEvent e)
    {
        if (Predicate != null && !Predicate(e))
        {
            // predicate not matched → always log
            return true;
        }
        // FIX: Rate == 0 previously threw DivideByZeroException in the modulo
        // below; Rate <= 1 means "no sampling", so short-circuit to true.
        if (Rate <= 1)
        {
            return true;
        }
        return Interlocked.Increment(ref _counter) % Rate == 0;
    }
}
}

View File

@@ -0,0 +1,35 @@
using System.Collections.Concurrent;
using System.Text;
namespace EonaCat.LogStack.EonaCatLogStackCore
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Lock-free pool of StringBuilder instances. Oversized builders are
/// discarded on return so the pool never retains huge buffers.
/// </summary>
internal static class StringBuilderPool
{
    private static readonly ConcurrentBag<StringBuilder> Pool = new ConcurrentBag<StringBuilder>();
    private const int InitialCapacity = 4096;
    private const int MaxCapacity = 131072; // 128 KB discard oversized builders

    /// <summary>Returns a cleared builder from the pool, or a fresh one.</summary>
    public static StringBuilder Rent()
    {
        if (!Pool.TryTake(out var builder))
        {
            return new StringBuilder(InitialCapacity);
        }
        builder.Clear();
        return builder;
    }

    /// <summary>Clears and re-pools the builder unless it has grown too large.</summary>
    public static void Return(StringBuilder sb)
    {
        if (sb.Capacity > MaxCapacity)
        {
            return; // let the GC reclaim oversized buffers
        }
        sb.Clear();
        Pool.Add(sb);
    }
}
}

View File

@@ -0,0 +1,14 @@
using System;
namespace EonaCat.LogStack.Extensions;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public static class DateTimeExtensions
{
    /// <summary>
    /// Whole seconds elapsed since the Unix epoch (1970-01-01T00:00:00Z),
    /// after converting the value to UTC.
    /// </summary>
    public static long ToUnixTimestamp(this DateTime dateTime)
    {
        var utc = dateTime.ToUniversalTime();
        return (long)(utc - DateTime.UnixEpoch).TotalSeconds;
    }
}

View File

@@ -0,0 +1,19 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace EonaCat.LogStack.Extensions
{
internal static class DictionaryExtensions
{
    /// <summary>
    /// Adds the key/value pair only when the key is absent.
    /// Returns true when the pair was added, false when the key already existed.
    /// </summary>
    public static bool TryAdd<TKey, TValue>(this IDictionary<TKey, TValue> dict, TKey key, TValue value)
    {
        if (dict.ContainsKey(key))
        {
            return false;
        }
        dict[key] = value;
        return true;
    }
}
}

View File

@@ -0,0 +1,101 @@
using EonaCat.Json;
using System;
using System.Collections;
using System.Diagnostics;
namespace EonaCat.LogStack.Extensions;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Formats exceptions into a detailed multi-line report (type, data, inner
/// exception, source location) for logging.
/// </summary>
public static class ExceptionExtensions
{
    /// <summary>
    /// Builds a human-readable report for <paramref name="exception"/>,
    /// optionally tagged with the module/method where it was caught.
    /// Returns an empty string for a null exception.
    /// </summary>
    public static string FormatExceptionToMessage(this Exception exception, string module = null, string method = null)
    {
        if (exception == null)
        {
            return string.Empty;
        }
        var st = new StackTrace(exception, true);
        var frame = st.GetFrame(0);
        int fileLine = -1;
        string filename = "Unknown";
        if (frame != null)
        {
            fileLine = frame.GetFileLineNumber();
            // FIX: GetFileName returns null when no PDB/symbols are available,
            // which silently defeated the "Unknown" fallback.
            filename = frame.GetFileName() ?? "Unknown";
        }
        var sb = new StringBuilderChill();
        sb.AppendLine();
        sb.AppendLine($"--- Exception details provided by {DllInfo.ApplicationName} on {Environment.MachineName} ---");
        if (!string.IsNullOrEmpty(module))
        {
            sb.AppendLine(" Module       : " + module);
        }
        if (!string.IsNullOrEmpty(method))
        {
            sb.AppendLine(" Method       : " + method);
        }
        sb.Append(" Type         : ").AppendLine(exception.GetType().ToString());
        sb.Append(" Data         : ").AppendLine(exception.Data != null && exception.Data.Count > 0
            ? FormatExceptionData(exception.Data)
            : "(none)");
        sb.Append(" Inner        : ").AppendLine(exception.InnerException != null
            ? FormatInnerException(exception.InnerException)
            : "(null)");
        sb.Append(" Message      : ").AppendLine(exception.Message);
        sb.Append(" Source       : ").AppendLine(exception.Source);
        sb.Append(" StackTrace   : ").AppendLine(exception.StackTrace);
        sb.Append(" Line         : ").AppendLine(fileLine.ToString());
        sb.Append(" File         : ").AppendLine(filename);
        sb.Append(" ToString     : ").AppendLine(exception.ToString());
        sb.AppendLine("---");
        return sb.ToString();
    }

    /// <summary>Renders Exception.Data entries as " | key: value" segments.</summary>
    private static string FormatExceptionData(IDictionary data)
    {
        var sb = new StringBuilderChill();
        foreach (DictionaryEntry entry in data)
        {
            if (entry.Key != null)
            {
                sb.Append(" | ")
                    .Append(entry.Key);
            }
            if (entry.Value != null)
            {
                sb.Append(": ")
                    .AppendLine(entry.Value.ToString());
            }
        }
        return sb.ToString();
    }

    /// <summary>Renders one inner exception (type, message, source, trace, data).</summary>
    private static string FormatInnerException(Exception innerException)
    {
        var sb = new StringBuilderChill();
        sb.AppendLine(innerException.GetType().ToString())
            .AppendLine(" Message      : " + innerException.Message)
            .AppendLine(" Source       : " + innerException.Source)
            .AppendLine(" StackTrace   : " + innerException.StackTrace)
            .AppendLine(" ToString     : " + innerException)
            .Append(" Data         : ")
            .AppendLine(innerException.Data != null && innerException.Data.Count > 0
                ? FormatExceptionData(innerException.Data)
                : "(none)");
        return sb.ToString();
    }
}

View File

@@ -0,0 +1,843 @@
using EonaCat.Json;
using EonaCat.Json.Serialization;
using Microsoft.Extensions.Logging;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Serialization;
namespace EonaCat.LogStack.Extensions
{
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
public enum DumpFormat
{
    Json, // serialize via the project's JsonHelper (default)
    Xml,  // serialize via System.Xml.Serialization.XmlSerializer
    Tree  // recursive human-readable tree with cycle detection
}
public static class ObjectExtensions
{
/// <summary>
/// Runs <paramref name="action"/> when the object is non-null and satisfies
/// <paramref name="predicate"/>; returns the object for chaining.
/// </summary>
public static T If<T>(this T obj, Func<T, bool> predicate, Action<T> action)
{
    if (obj == null)
    {
        return obj;
    }
    if (predicate(obj))
    {
        action(obj);
    }
    return obj;
}
/// <summary>
/// Runs <paramref name="action"/> when the object is non-null and does NOT
/// satisfy <paramref name="predicate"/>; returns the object for chaining.
/// </summary>
public static T IfNot<T>(this T obj, Func<T, bool> predicate, Action<T> action)
{
    if (obj == null)
    {
        return obj;
    }
    if (!predicate(obj))
    {
        action(obj);
    }
    return obj;
}
/// <summary>
/// Runs <paramref name="action"/> on the object when it is non-null, then
/// returns the object itself. Useful for side effects inside a fluent chain.
/// </summary>
public static T Tap<T>(this T obj, Action<T> action)
{
    if (obj == null)
    {
        return obj;
    }
    action(obj);
    return obj;
}
/// <summary>
/// Returns true if the object's runtime type is assignable to
/// <typeparamref name="TInterface"/>; false for null.
/// </summary>
public static bool Implements<TInterface>(this object obj)
{
    // 'is' already returns false for null and covers the same
    // assignability check as typeof(TInterface).IsAssignableFrom.
    return obj is TInterface;
}
/// <summary>
/// Dumps any object to a string in JSON, XML, or detailed tree format.
/// </summary>
/// <param name="currentObject">Object to dump</param>
/// <param name="format">JSON (default), XML, or tree</param>
/// <param name="detailed">For JSON: include private/internal fields. Ignored for tree format</param>
/// <param name="maxDepth">Optional max depth for tree dump. Null = no limit</param>
/// <param name="maxCollectionItems">Optional max items to display in collections. Null = show all</param>
/// <returns>String representation of the object, or an error message on failure</returns>
public static string Dump(this object currentObject, DumpFormat format = DumpFormat.Json, bool detailed = false, int? maxDepth = null, int? maxCollectionItems = null)
{
    if (currentObject == null)
    {
        return "null";
    }
    try
    {
        return format switch
        {
            DumpFormat.Xml => DumpXml(currentObject),
            DumpFormat.Tree => DumpTree(currentObject, maxDepth, maxCollectionItems),
            _ => DumpJson(currentObject, detailed),
        };
    }
    catch (Exception ex)
    {
        return $"Error dumping object: {ex.Message}";
    }
}
/// <summary>
/// Returns <paramref name="defaultValue"/> when the object is null,
/// otherwise the object itself.
/// </summary>
public static T OrDefault<T>(this T obj, T defaultValue = default)
{
    if (obj == null)
    {
        return defaultValue;
    }
    return obj;
}
/// <summary>
/// Returns the object when non-null, otherwise invokes
/// <paramref name="fallback"/> to produce a value.
/// </summary>
public static T OrElse<T>(this T obj, Func<T> fallback)
{
    if (obj != null)
    {
        return obj;
    }
    return fallback();
}
/// <summary>
/// Converts an object to a JSON string with optional indentation.
/// Returns an empty string for null input or on serialization failure.
/// </summary>
public static string ToJson(this object obj, bool indented = false)
{
    if (obj == null)
    {
        return string.Empty;
    }
    try
    {
        var formatting = indented ? Formatting.Indented : Formatting.None;
        return Json.JsonHelper.ToJson(obj, formatting);
    }
    catch
    {
        return string.Empty;
    }
}
/// <summary>
/// Converts the object to a string; empty string when the object is null
/// or its ToString() yields null.
/// </summary>
public static string SafeToString(this object obj)
{
    var text = obj?.ToString();
    return text ?? string.Empty;
}
/// <summary>
/// True when the object equals the default value of its type
/// (null for reference types, zero-initialized for value types).
/// </summary>
public static bool IsNullOrDefault<T>(this T obj)
{
    var comparer = EqualityComparer<T>.Default;
    return comparer.Equals(obj, default);
}
/// <summary>
/// Casts the object to <typeparamref name="T"/>; returns default(T)
/// when the cast is not possible.
/// </summary>
public static T SafeCast<T>(this object obj)
{
    return obj is T typed ? typed : default;
}
/// <summary>
/// Parses the object's string form as an int; returns
/// <paramref name="defaultValue"/> for null or unparsable input.
/// </summary>
public static int ToInt(this object obj, int defaultValue = 0)
{
    var text = obj?.ToString();
    if (text == null)
    {
        return defaultValue;
    }
    return int.TryParse(text, out var parsed) ? parsed : defaultValue;
}
/// <summary>
/// Parses the object's string form as a long; returns
/// <paramref name="defaultValue"/> for null or unparsable input.
/// </summary>
public static long ToLong(this object obj, long defaultValue = 0)
{
    var text = obj?.ToString();
    if (text == null)
    {
        return defaultValue;
    }
    return long.TryParse(text, out var parsed) ? parsed : defaultValue;
}
/// <summary>
/// Parses the object's string form as a double (current culture, as with
/// double.TryParse); returns <paramref name="defaultValue"/> on failure.
/// </summary>
public static double ToDouble(this object obj, double defaultValue = 0)
{
    var text = obj?.ToString();
    if (text == null)
    {
        return defaultValue;
    }
    return double.TryParse(text, out var parsed) ? parsed : defaultValue;
}
/// <summary>
/// Parses the object's string form as a bool ("true"/"false",
/// case-insensitive); returns <paramref name="defaultValue"/> on failure.
/// </summary>
public static bool ToBool(this object obj, bool defaultValue = false)
{
    var text = obj?.ToString();
    if (text == null)
    {
        return defaultValue;
    }
    return bool.TryParse(text, out var parsed) ? parsed : defaultValue;
}
/// <summary>
/// True when the object is an instance of <typeparamref name="T"/>
/// (false for null).
/// </summary>
public static bool IsType<T>(this object obj)
{
    return obj is T;
}
/// <summary>
/// Invokes <paramref name="action"/> with the object when it is non-null.
/// </summary>
public static void IfNotNull<T>(this T obj, Action<T> action)
{
    if (obj == null)
    {
        return;
    }
    action(obj);
}
/// <summary>
/// Invokes <paramref name="action"/> when the object is null.
/// </summary>
public static void IfNull<T>(this T obj, Action action)
{
    if (obj != null)
    {
        return;
    }
    action();
}
/// <summary>
/// Wraps the object into a single-item sequence; an empty sequence for null.
/// </summary>
public static IEnumerable<T> AsEnumerable<T>(this T obj)
{
    if (obj == null)
    {
        yield break;
    }
    yield return obj;
}
/// <summary>
/// Safely returns a string representation truncated to
/// <paramref name="maxLength"/> characters. Null objects yield an empty
/// string; non-positive lengths yield an empty string.
/// </summary>
public static string ToSafeString(this object obj, int maxLength)
{
    // FIX: a negative maxLength previously threw ArgumentOutOfRangeException
    // from Substring; treat any non-positive length as "no content".
    if (maxLength <= 0)
    {
        return string.Empty;
    }
    var str = obj?.ToString() ?? string.Empty;
    return str.Length <= maxLength ? str : str.Substring(0, maxLength);
}
/// <summary>
/// Returns the object's hash code, or 0 when the object is null.
/// </summary>
public static int SafeHashCode(this object obj)
{
    return obj == null ? 0 : obj.GetHashCode();
}
/// <summary>
/// Returns the object when non-null; otherwise throws the exception
/// produced by <paramref name="exceptionFactory"/>.
/// </summary>
public static T OrThrow<T>(this T obj, Func<Exception> exceptionFactory)
{
    if (obj != null)
    {
        return obj;
    }
    throw exceptionFactory();
}
/// <summary>
/// Serializes the object to indented JSON; detailed mode also honors serializable
/// attributes/interfaces via a customized contract resolver.
/// </summary>
private static string DumpJson(object currentObject, bool isDetailed)
{
    var settings = new JsonSerializerSettings();
    settings.ReferenceLoopHandling = ReferenceLoopHandling.Ignore;
    settings.Formatting = Formatting.Indented;
    if (isDetailed)
    {
        var resolver = new DefaultContractResolver();
        resolver.IgnoreSerializableAttribute = false;
        resolver.IgnoreSerializableInterface = false;
        settings.ContractResolver = resolver;
    }
    return JsonHelper.ToJson(currentObject, settings);
}
/// <summary>
/// Serializes the object to XML; returns a diagnostic message instead of throwing when the
/// type is not XML-serializable.
/// </summary>
private static string DumpXml(object currentObject)
{
    try
    {
        var serializer = new XmlSerializer(currentObject.GetType());
        using var writer = new StringWriter();
        serializer.Serialize(writer, currentObject);
        return writer.ToString();
    }
    catch (Exception ex)
    {
        return $"XML serialization failed: {ex.Message}";
    }
}
/// <summary>
/// Renders the object graph as an indented text tree, using reference identity to detect cycles.
/// </summary>
private static string DumpTree(object currentObject, int? maxDepth, int? maxCollectionItems)
{
    var output = new StringBuilder();
    var seen = new HashSet<object>(new ReferenceEqualityComparer());
    DumpTreeInternal(currentObject, output, 0, seen, maxDepth, maxCollectionItems);
    return output.ToString();
}
/// <summary>
/// Recursive worker for DumpTree: appends one indented line (or subtree) per value to
/// <paramref name="stringBuilder"/>, honoring optional depth and collection-size limits.
/// </summary>
private static void DumpTreeInternal(object currentObject, StringBuilder stringBuilder, int indent, HashSet<object> visited, int? maxDepth, int? maxCollectionItems)
{
    // Two spaces per nesting level.
    string indentation = new string(' ', indent * 2);
    if (currentObject == null)
    {
        stringBuilder.AppendLine($"{indentation}null");
        return;
    }
    Type type = currentObject.GetType();
    string typeName = type.FullName;
    // Leaf values are printed inline with their type name.
    if (IsPrimitive(type))
    {
        stringBuilder.AppendLine($"{indentation}{currentObject} ({typeName})");
        return;
    }
    // NOTE(review): entries are never removed from 'visited', so a second occurrence of the
    // same reference anywhere in the tree — not only on the current recursion path — is
    // reported as circular. Confirm this is the intended behavior.
    if (visited.Contains(currentObject))
    {
        stringBuilder.AppendLine($"{indentation}<<circular reference to {typeName}>>");
        return;
    }
    if (maxDepth.HasValue && indent >= maxDepth.Value)
    {
        stringBuilder.AppendLine($"{indentation}<<max depth reached: {typeName}>>");
        return;
    }
    visited.Add(currentObject);
    // Strings are IEnumerable but are treated as leaves elsewhere, so they are excluded here.
    if (currentObject is IEnumerable enumerable && !(currentObject is string))
    {
        // NOTE(review): the sequence is enumerated twice (once to count, once to dump);
        // one-shot enumerables (e.g. generators) will appear empty on the second pass.
        int count = 0;
        foreach (var _ in enumerable)
        {
            count++;
        }
        // Large collections are collapsed to a single summary line.
        if (maxCollectionItems.HasValue && count > maxCollectionItems.Value)
        {
            stringBuilder.AppendLine($"{indentation}{typeName} [<<{count} items, collapsed>>]");
            return;
        }
        stringBuilder.AppendLine($"{indentation}{typeName} [");
        foreach (var item in enumerable)
        {
            DumpTreeInternal(item, stringBuilder, indent + 1, visited, maxDepth, maxCollectionItems);
        }
        stringBuilder.AppendLine($"{indentation}]");
    }
    else
    {
        stringBuilder.AppendLine($"{indentation}{typeName} {{");
        var flags = BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance;
        var members = type.GetFields(flags);
        foreach (var field in members)
        {
            object value = null;
            try
            {
                value = field.GetValue(currentObject);
            }
            catch
            {
                // Reflection failures are reported inline rather than aborting the dump.
                value = "<<unavailable>>";
            }
            stringBuilder.Append($"{indentation} {field.Name} = ");
            DumpTreeInternal(value, stringBuilder, indent + 1, visited, maxDepth, maxCollectionItems);
        }
        var properties = type.GetProperties(flags);
        foreach (var current in properties)
        {
            // Indexed properties cannot be read without index arguments; skip them.
            if (current.GetIndexParameters().Length > 0)
            {
                continue;
            }
            object value = null;
            try { value = current.GetValue(currentObject); } catch { value = "<<unavailable>>"; }
            stringBuilder.Append($"{indentation} {current.Name} = ");
            DumpTreeInternal(value, stringBuilder, indent + 1, visited, maxDepth, maxCollectionItems);
        }
        stringBuilder.AppendLine($"{indentation}}}");
    }
}
/// <summary>
/// Treats CLR primitives, enums and a handful of common value-like BCL types as
/// leaf values for dumping purposes.
/// </summary>
private static bool IsPrimitive(Type type)
{
    if (type.IsPrimitive || type.IsEnum)
    {
        return true;
    }
    return type == typeof(string)
        || type == typeof(decimal)
        || type == typeof(DateTime)
        || type == typeof(DateTimeOffset)
        || type == typeof(Guid)
        || type == typeof(TimeSpan);
}
/// <summary>
/// Equality comparer that compares by reference identity only, ignoring any
/// overridden Equals/GetHashCode on the objects themselves.
/// </summary>
private class ReferenceEqualityComparer : IEqualityComparer<object>
{
    public new bool Equals(object x, object y)
    {
        return ReferenceEquals(x, y);
    }

    public int GetHashCode(object obj)
    {
        // Identity-based hash, independent of the object's own GetHashCode.
        return System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(obj);
    }
}
/// <summary>
/// Applies <paramref name="action"/> to every item; a null sequence or null action is a no-op.
/// </summary>
public static void ForEach<T>(this IEnumerable<T> items, Action<T> action)
{
    if (items is null)
    {
        return;
    }
    if (action is null)
    {
        return;
    }
    foreach (var current in items)
    {
        action(current);
    }
}
/// <summary>True when the sequence is null or contains no items.</summary>
public static bool IsNullOrEmpty<T>(this IEnumerable<T> items) => !(items?.Any() ?? false);
/// <summary>True when the sequence is non-null and contains at least one item.</summary>
public static bool HasItems<T>(this IEnumerable<T> items) => items != null && items.Any();
/// <summary>Returns the element at <paramref name="index"/>, or the default when the list is null or the index is out of range.</summary>
public static T SafeGet<T>(this IList<T> list, int index, T defaultValue = default)
{
    var inRange = list != null && index >= 0 && index < list.Count;
    return inRange ? list[index] : defaultValue;
}
/// <summary>Joins the sequence into a delimited string; a null sequence yields an empty string.</summary>
public static string ToDelimitedString<T>(this IEnumerable<T> items, string delimiter = ", ")
{
    if (items == null)
    {
        return string.Empty;
    }
    return string.Join(delimiter, items);
}
/// <summary>Extension wrapper for <see cref="string.IsNullOrWhiteSpace"/>.</summary>
public static bool IsNullOrWhiteSpace(this string s)
{
    return string.IsNullOrWhiteSpace(s);
}
/// <summary>Returns the string cut to at most <paramref name="maxLength"/> characters; null/empty pass through.</summary>
public static string Truncate(this string s, int maxLength)
{
    if (string.IsNullOrEmpty(s) || s.Length <= maxLength)
    {
        return s;
    }
    return s.Substring(0, maxLength);
}
/// <summary>
/// Case-insensitive substring check. A null subject yields false; a null search value is
/// treated as the empty string (which every non-null string contains).
/// </summary>
public static bool ContainsIgnoreCase(this string s, string value)
{
    if (s == null)
    {
        return false;
    }
    return s.IndexOf(value ?? "", StringComparison.OrdinalIgnoreCase) >= 0;
}
/// <summary>Returns the string, or <paramref name="defaultValue"/> when it is null or empty.</summary>
public static string OrDefault(this string s, string defaultValue)
{
    return string.IsNullOrEmpty(s) ? defaultValue : s;
}
/// <summary>True when the date falls on a Saturday or Sunday.</summary>
public static bool IsWeekend(this DateTime date) =>
    date.DayOfWeek is DayOfWeek.Saturday or DayOfWeek.Sunday;
/// <summary>Returns midnight (00:00:00.0000000) of the given date.</summary>
public static DateTime StartOfDay(this DateTime date)
{
    return date.Date;
}
/// <summary>Returns the last representable tick of the given date (one tick before next midnight).</summary>
public static DateTime EndOfDay(this DateTime date)
{
    return date.Date.AddDays(1).AddTicks(-1);
}
/// <summary>
/// Opens a logging scope whose state is the dictionary form of <paramref name="context"/>;
/// returns null when the logger or context is null.
/// </summary>
public static IDisposable BeginLoggingScope(this ILogger logger, object context)
{
    if (logger is null || context is null)
    {
        return null;
    }
    var state = context.ToDictionary();
    return logger.BeginScope(state);
}
/// <summary>
/// Runs <paramref name="action"/> and logs its wall-clock duration at information level;
/// a null logger or action is a no-op.
/// </summary>
public static void LogExecutionTime(this ILogger logger, Action action, string operationName)
{
    if (logger is null || action is null)
    {
        return;
    }
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    action();
    stopwatch.Stop();
    logger.LogInformation("{Operation} executed in {ElapsedMilliseconds}ms", operationName, stopwatch.ElapsedMilliseconds);
}
/// <summary>
/// Converts a Unix timestamp, expressed as the number of seconds since the Unix epoch, to a DateTime
/// value.
/// </summary>
/// <remarks>NOTE(review): despite earlier docs claiming a local time, DateTimeOffset.FromUnixTimeSeconds(x).DateTime
/// returns the UTC wall-clock time with DateTimeKind.Unspecified — no conversion to the local zone happens here.
/// Use DateTimeOffset.FromUnixTimeSeconds(timestamp).LocalDateTime if a local DateTime is wanted.</remarks>
/// <param name="timestamp">The Unix timestamp representing the number of seconds that have elapsed since 00:00:00 UTC on 1 January
/// 1970.</param>
/// <returns>A DateTime (Kind = Unspecified, UTC-based wall-clock) equivalent of the specified Unix timestamp.</returns>
public static DateTime FromUnixTimestamp(this long timestamp) =>
    DateTimeOffset.FromUnixTimeSeconds(timestamp).DateTime;
/// <summary>
/// Starts observing the task without awaiting it from the caller; any exception the task
/// throws is routed to <paramref name="onError"/> or, when none is given, written to the console.
/// </summary>
/// <remarks>Intentionally async void: this is a top-level fire-and-forget sink, so exceptions
/// must be observed here and never propagate to the caller.</remarks>
/// <param name="task">The task to observe; a null task is a no-op.</param>
/// <param name="onError">Optional callback invoked with the task's exception.</param>
public static async void FireAndForget(this Task task, Action<Exception> onError = null)
{
    if (task is null)
    {
        return;
    }
    try
    {
        await task;
    }
    catch (Exception ex)
    {
        if (onError is null)
        {
            Console.WriteLine("FireAndForget Exception: " + ex.FormatExceptionToMessage());
            return;
        }
        onError(ex);
    }
}
/// <summary>True when the object declares an instance property (public or non-public) with the given name.</summary>
public static bool HasProperty(this object obj, string name)
{
    if (obj == null)
    {
        return false;
    }
    const BindingFlags flags = BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance;
    return obj.GetType().GetProperty(name, flags) != null;
}
/// <summary>Reads an instance property (public or non-public) by name; returns null when the object or property is missing.</summary>
public static object GetPropertyValue(this object obj, string name)
{
    if (obj == null)
    {
        return null;
    }
    const BindingFlags flags = BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance;
    var property = obj.GetType().GetProperty(name, flags);
    return property == null ? null : property.GetValue(obj);
}
/// <summary>
/// Creates a dictionary containing the public and non-public instance properties and fields of the specified
/// object.
/// </summary>
/// <remarks>Indexed properties are excluded from the resulting dictionary. Both public and
/// non-public instance members are included. If a field and a property share the same name, the
/// field value overwrites the property value, because fields are copied after properties.</remarks>
/// <param name="obj">The object whose properties and fields are to be included in the dictionary. Can be null.</param>
/// <returns>A dictionary with the names and values of the object's properties and fields. If the object is null, returns
/// an empty dictionary.</returns>
public static Dictionary<string, object> ToDictionary(this object obj)
{
    if (obj == null)
    {
        return new Dictionary<string, object>();
    }
    var dict = new Dictionary<string, object>();
    var flags = BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance;
    foreach (var prop in obj.GetType().GetProperties(flags))
    {
        // Indexed properties cannot be read without index arguments; skip them.
        if (prop.GetIndexParameters().Length > 0)
        {
            continue;
        }
        dict[prop.Name] = prop.GetValue(obj);
    }
    // Fields are copied last, so they win on name collisions (see remarks).
    foreach (var field in obj.GetType().GetFields(flags))
    {
        dict[field.Name] = field.GetValue(obj);
    }
    return dict;
}
/// <summary>
/// Converts any object to a human-readable log string, including collections and nested objects.
/// </summary>
/// <param name="obj">Object to render; null renders as "null".</param>
/// <param name="maxDepth">Maximum nesting depth; deeper values render as "...".</param>
/// <param name="currentDepth">Current recursion depth (internal; leave at 0).</param>
public static string ToLogString(this object obj, int maxDepth = 3, int currentDepth = 0)
{
    if (obj == null)
    {
        return "null";
    }
    if (currentDepth >= maxDepth)
    {
        return "...";
    }
    // Handle strings separately (they are IEnumerable but should render verbatim)
    if (obj is string str)
    {
        return str;
    }
    // Handle IEnumerable
    if (obj is IEnumerable enumerable)
    {
        var items = new List<string>();
        foreach (var item in enumerable)
        {
            items.Add(item.ToLogString(maxDepth, currentDepth + 1));
        }
        return "[" + string.Join(", ", items) + "]";
    }
    // Handle primitive types
    var type = obj.GetType();
    if (type.IsPrimitive || obj is decimal || obj is DateTime || obj is Guid)
    {
        return obj.ToString();
    }
    // Handle objects with properties
    try
    {
        var props = type.GetProperties();
        var sb = new StringBuilder("{");
        bool first = true;
        foreach (var p in props)
        {
            // BUGFIX: indexers and write-only properties make GetValue throw, which
            // previously collapsed the whole dump to obj.ToString(); skip them instead
            // (consistent with ToDictionary).
            if (!p.CanRead || p.GetIndexParameters().Length > 0)
            {
                continue;
            }
            if (!first)
            {
                sb.Append(", ");
            }
            var val = p.GetValue(obj);
            sb.Append($"{p.Name}={val.ToLogString(maxDepth, currentDepth + 1)}");
            first = false;
        }
        sb.Append("}");
        return sb.ToString();
    }
    catch
    {
        // Last-resort fallback for types whose reflection still fails.
        return obj.ToString();
    }
}
/// <summary>
/// Checks whether an object is "empty": null, a blank/whitespace string, or a collection
/// with no elements. Any other object is considered non-empty.
/// </summary>
public static bool IsEmpty(this object obj)
{
    switch (obj)
    {
        case null:
            return true;
        case string str:
            return string.IsNullOrWhiteSpace(str);
        case ICollection col:
            return col.Count == 0;
        case IEnumerable enumerable:
            return !enumerable.Cast<object>().Any();
        default:
            return false;
    }
}
/// <summary>
/// Invokes <paramref name="action"/> with the object unless it is empty (per <see cref="IsEmpty"/>).
/// </summary>
public static void IfNotEmpty<T>(this T obj, Action<T> action)
{
    if (obj.IsEmpty())
    {
        return;
    }
    action(obj);
}
/// <summary>
/// Returns <paramref name="defaultValue"/> when the object is empty (per <see cref="IsEmpty"/>);
/// otherwise returns the object itself.
/// </summary>
public static T OrDefaultIfEmpty<T>(this T obj, T defaultValue) =>
    obj.IsEmpty() ? defaultValue : obj;
/// <summary>
/// True when the object's string representation parses as a double (covers int, float,
/// double, decimal, long, etc.).
/// </summary>
public static bool IsNumeric(this object obj)
{
    return obj != null && double.TryParse(obj.ToString(), out _);
}
/// <summary>
/// Parses the object's string representation as a double, falling back to
/// <paramref name="defaultValue"/> on null or parse failure.
/// </summary>
public static double ToDoubleSafe(this object obj, double defaultValue = 0) =>
    obj != null && double.TryParse(obj.ToString(), out var parsed) ? parsed : defaultValue;
/// <summary>
/// Parses the object's string representation as an int, falling back to
/// <paramref name="defaultValue"/> on null or parse failure.
/// </summary>
public static int ToIntSafe(this object obj, int defaultValue = 0) =>
    obj != null && int.TryParse(obj.ToString(), out var parsed) ? parsed : defaultValue;
/// <summary>
/// Returns the short type name of the object, or the literal "null" for a null reference.
/// </summary>
public static string GetTypeName(this object obj)
{
    return obj == null ? "null" : obj.GetType().Name;
}
/// <summary>
/// Applies <paramref name="mapper"/> to the object when it is not null; otherwise returns
/// <paramref name="fallback"/>.
/// </summary>
public static TResult Map<T, TResult>(this T obj, Func<T, TResult> mapper, TResult fallback = default)
{
    return obj == null ? fallback : mapper(obj);
}
/// <summary>
/// Masks a sensitive string (password, token, ...): keeps the first and last two characters
/// and replaces the middle with asterisks. Strings of four characters or fewer (or null/empty)
/// become "****".
/// </summary>
public static string MaskSensitive(this string str)
{
    if (string.IsNullOrEmpty(str) || str.Length <= 4)
    {
        return "****";
    }
    var maskedLength = str.Length - 4;
    var prefix = str.Substring(0, 2);
    var suffix = str.Substring(str.Length - 2);
    return prefix + new string('*', maskedLength) + suffix;
}
/// <summary>
/// Masks every readable+writable string property whose name contains one of the given
/// keywords (case-insensitive), using <see cref="MaskSensitive"/>. No-op when the object
/// or the keyword list is null/empty.
/// </summary>
public static void MaskProperties(this object obj, params string[] keywords)
{
    if (obj == null || keywords == null || keywords.Length == 0)
    {
        return;
    }
    foreach (var property in obj.GetType().GetProperties())
    {
        if (!property.CanRead || !property.CanWrite)
        {
            continue;
        }
        var matches = keywords.Any(k => property.Name.IndexOf(k, StringComparison.OrdinalIgnoreCase) >= 0);
        if (!matches)
        {
            continue;
        }
        if (property.GetValue(obj) is string value && !string.IsNullOrEmpty(value))
        {
            property.SetValue(obj, value.MaskSensitive());
        }
    }
}
}
}

View File

@@ -0,0 +1,260 @@
using System;
using System.IO;
namespace EonaCat.LogStack.Extensions;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// A windowed view over another <see cref="Stream"/>: all reads/writes are translated by
/// <c>BaseStreamOffset</c> and bounded by <c>Length1</c>.
/// </summary>
public class OffsetStream : Stream
{
    private const int BufferSize = 4096;

    /// <summary>
    /// Creates a view over <paramref name="stream"/> starting at <paramref name="offset"/>.
    /// </summary>
    /// <param name="stream">The underlying stream.</param>
    /// <param name="offset">Start of the window; must lie within the stream (seekable streams only).</param>
    /// <param name="length">Window length; 0 means "to the end of the stream" for seekable streams.</param>
    /// <param name="readOnly">When true, all mutating operations throw.</param>
    /// <param name="ownStream">When true, disposing this stream disposes the base stream too.</param>
    /// <exception cref="EndOfStreamException">The offset or length reaches past the end of the stream.</exception>
    public OffsetStream(Stream stream, long offset = 0, long length = 0, bool readOnly = false, bool ownStream = false)
    {
        if (stream.CanSeek)
        {
            if (offset > stream.Length)
            {
                throw new EndOfStreamException();
            }
            BaseStreamOffset = offset;
            if (length > stream.Length - offset)
            {
                throw new EndOfStreamException();
            }
            // length == 0 means "window extends to the end of the base stream".
            Length1 = length == 0 ? stream.Length - offset : length;
        }
        else
        {
            // Non-seekable base streams cannot honor an offset; the caller-supplied
            // length is trusted as-is.
            BaseStreamOffset = 0;
            Length1 = length;
        }
        BaseStream = stream;
        ReadOnly = readOnly;
        OwnStream = ownStream;
    }

    public override bool CanRead => BaseStream.CanRead;
    public override bool CanSeek => BaseStream.CanSeek;
    public override bool CanWrite => BaseStream.CanWrite && !ReadOnly;
    public override long Length => Length1;

    /// <summary>Position relative to the start of the window.</summary>
    public override long Position
    {
        get => Position1;
        set
        {
            if (value > Length1)
            {
                throw new EndOfStreamException();
            }
            if (!BaseStream.CanSeek)
            {
                throw new NotSupportedException("Cannot seek stream.");
            }
            Position1 = value;
        }
    }

    /// <summary>Offset of the window within the base stream.</summary>
    public long BaseStreamOffset { get; private set; }
    public Stream BaseStream { get; }
    /// <summary>Window length (exposed via <see cref="Length"/>).</summary>
    public long Length1 { get; set; }
    /// <summary>Window-relative position (exposed via <see cref="Position"/>).</summary>
    public long Position1 { get; set; }
    public bool ReadOnly { get; }
    public bool Disposed { get; set; }
    public bool OwnStream { get; }

    protected override void Dispose(bool disposing)
    {
        if (Disposed)
        {
            return;
        }
        if (disposing)
        {
            // BUGFIX: use short-circuit && (was bitwise &) — harmless here but misleading.
            if (OwnStream && BaseStream != null)
            {
                BaseStream.Dispose();
            }
        }
        Disposed = true;
        base.Dispose(disposing);
    }

    public override void Flush()
    {
        if (ReadOnly)
        {
            throw new IOException("OffsetStream is read only.");
        }
        BaseStream.Flush();
    }

    /// <summary>
    /// Reads up to <paramref name="count"/> bytes from the window; returns 0 at the end of the
    /// window or when <paramref name="count"/> is 0 (per the Stream contract).
    /// </summary>
    public override int Read(byte[] buffer, int offset, int count)
    {
        if (count < 0)
        {
            // BUGFIX: only negative counts are invalid; count == 0 must return 0.
            // (Previous code also passed the message as the paramName argument.)
            throw new ArgumentOutOfRangeException(nameof(count), "Count cannot be negative.");
        }
        if (count == 0 || Position1 >= Length1)
        {
            return 0;
        }
        // Clamp the request to what remains in the window.
        if (count > Length1 - Position1)
        {
            count = Convert.ToInt32(Length1 - Position1);
        }
        if (BaseStream.CanSeek)
        {
            BaseStream.Position = BaseStreamOffset + Position1;
        }
        var bytesRead = BaseStream.Read(buffer, offset, count);
        Position1 += bytesRead;
        return bytesRead;
    }

    /// <summary>
    /// Seeks within the window; seeking exactly to the end (position == Length) is allowed.
    /// </summary>
    public override long Seek(long offset, SeekOrigin origin)
    {
        if (!BaseStream.CanSeek)
        {
            throw new IOException("Stream is not seekable.");
        }
        long pos;
        switch (origin)
        {
            case SeekOrigin.Begin:
                pos = offset;
                break;
            case SeekOrigin.Current:
                pos = Position1 + offset;
                break;
            case SeekOrigin.End:
                pos = Length1 + offset;
                break;
            default:
                pos = 0;
                break;
        }
        // BUGFIX: allow pos == Length1 (seek to end is legal for streams); previous
        // code rejected it with 'pos >= Length1'.
        if (pos < 0 || pos > Length1)
        {
            throw new EndOfStreamException("OffsetStream reached beginning/end of stream.");
        }
        Position1 = pos;
        return pos;
    }

    public override void SetLength(long value)
    {
        if (ReadOnly)
        {
            throw new IOException("OffsetStream is read only.");
        }
        BaseStream.SetLength(BaseStreamOffset + value);
        Length1 = value;
    }

    /// <summary>
    /// Writes within the window; writing past the end of the window throws.
    /// </summary>
    public override void Write(byte[] buffer, int offset, int count)
    {
        if (ReadOnly)
        {
            throw new IOException("OffsetStream is read only.");
        }
        if (count < 1)
        {
            return;
        }
        var pos = Position1 + count;
        if (pos > Length1)
        {
            throw new EndOfStreamException("OffsetStream reached end of stream.");
        }
        if (BaseStream.CanSeek)
        {
            BaseStream.Position = BaseStreamOffset + Position1;
        }
        BaseStream.Write(buffer, offset, count);
        Position1 = pos;
    }

    /// <summary>Re-points this view at a new window without reallocating.</summary>
    public void Reset(long offset, long length, long position)
    {
        BaseStreamOffset = offset;
        Length1 = length;
        Position1 = position;
    }

    /// <summary>Copies the whole window into <paramref name="stream"/> using the default buffer size.</summary>
    public void WriteTo(Stream stream)
    {
        WriteTo(stream, BufferSize);
    }

    /// <summary>
    /// Copies the whole window into <paramref name="stream"/>, restoring the current position afterwards.
    /// </summary>
    public void WriteTo(Stream stream, int bufferSize)
    {
        if (!BaseStream.CanSeek)
        {
            throw new IOException("Stream is not seekable.");
        }
        if (Length1 < bufferSize)
        {
            bufferSize = Convert.ToInt32(Length1);
        }
        var previousPosition = Position1;
        Position1 = 0;
        try
        {
            CopyTo(stream, bufferSize);
        }
        finally
        {
            Position1 = previousPosition;
        }
    }
}

View File

@@ -0,0 +1,140 @@
using System;
using System.Drawing;
using System.Globalization;
namespace EonaCat.LogStack.Helpers;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Conversions between <see cref="Color"/>, <see cref="ConsoleColor"/> and HTML-style hex strings.
/// </summary>
public static class ColorHelper
{
    /// <summary>Formats a color as "#RRGGBB" (alpha is ignored).</summary>
    public static string ColorToHexString(Color c)
    {
        return "#" + c.R.ToString("X2") + c.G.ToString("X2") + c.B.ToString("X2");
    }

    /// <summary>Formats a color as "RGB(r,g,b)".</summary>
    public static string ColorToRGBString(Color c)
    {
        return "RGB(" + c.R + "," + c.G + "," + c.B + ")";
    }

    /// <summary>
    /// Maps a <see cref="ConsoleColor"/> to its conventional RGB equivalent.
    /// </summary>
    /// <exception cref="NotSupportedException">The value is not a known <see cref="ConsoleColor"/>.</exception>
    public static Color ConsoleColorToColor(this ConsoleColor consoleColor)
    {
        switch (consoleColor)
        {
            case ConsoleColor.Black:
                return Color.Black;
            case ConsoleColor.DarkBlue:
                return HexStringToColor("#000080");
            case ConsoleColor.DarkGreen:
                return HexStringToColor("#008000");
            case ConsoleColor.DarkCyan:
                return HexStringToColor("#008080");
            case ConsoleColor.DarkRed:
                return HexStringToColor("#800000");
            case ConsoleColor.DarkMagenta:
                return HexStringToColor("#800080");
            case ConsoleColor.DarkYellow:
                return HexStringToColor("#808000");
            case ConsoleColor.Gray:
                return HexStringToColor("#C0C0C0");
            case ConsoleColor.DarkGray:
                return HexStringToColor("#808080");
            case ConsoleColor.Blue:
                return Color.Blue;
            case ConsoleColor.Green:
                return Color.Lime;
            case ConsoleColor.Cyan:
                return Color.Cyan;
            case ConsoleColor.Red:
                return Color.Red;
            case ConsoleColor.Magenta:
                return Color.Magenta;
            case ConsoleColor.Yellow:
                return Color.Yellow;
            case ConsoleColor.White:
                return Color.White;
            default:
                throw new NotSupportedException();
        }
    }

    /// <summary>Parses an HTML hex color (#RGB, #RGBA, #RRGGBB or #RRGGBBAA) into a <see cref="Color"/>.</summary>
    public static Color HexStringToColor(string htmlColor, bool requireHexSpecified = false, int defaultAlpha = 0xFF)
    {
        return Color.FromArgb(HexColorToArgb(htmlColor, requireHexSpecified, defaultAlpha));
    }

    /// <summary>
    /// Parses an HTML hex color into a packed ARGB value.
    /// </summary>
    /// <param name="htmlColor">#RGB, #RGBA, #RRGGBB or #RRGGBBAA (the '#' is optional unless
    /// <paramref name="requireHexSpecified"/> is true).</param>
    /// <param name="requireHexSpecified">Require the leading '#'.</param>
    /// <param name="defaultAlpha">Alpha used when the input carries no alpha component.</param>
    /// <exception cref="ArgumentNullException">The input is null or empty.</exception>
    /// <exception cref="ArgumentException">The '#' is required but missing.</exception>
    /// <exception cref="FormatException">The digit count is not 3, 4, 6 or 8.</exception>
    public static int HexColorToArgb(string htmlColor, bool requireHexSpecified = false, int defaultAlpha = 0xFF)
    {
        if (string.IsNullOrEmpty(htmlColor))
        {
            throw new ArgumentNullException(nameof(htmlColor));
        }
        if (!htmlColor.StartsWith("#") && requireHexSpecified)
        {
            throw new ArgumentException($"Provided parameter '{htmlColor}' is not valid");
        }
        htmlColor = htmlColor.TrimStart('#');
        var symbolCount = htmlColor.Length;
        // Hex digits are culture-invariant; parse explicitly with the invariant culture.
        var value = int.Parse(htmlColor, NumberStyles.HexNumber, CultureInfo.InvariantCulture);
        switch (symbolCount)
        {
            case 3: // RGB short hand: duplicate each nibble into a full byte
            {
                return (defaultAlpha << 24)
                       | (value & 0xF)
                       | ((value & 0xF) << 4)
                       | ((value & 0xF0) << 4)
                       | ((value & 0xF0) << 8)
                       | ((value & 0xF00) << 8)
                       | ((value & 0xF00) << 12)
                    ;
            }
            case 4: // RGBA short hand
            {
                // Inline alpha swap: trailing A nibble becomes the high byte
                return ((value & 0xF) << 24)
                       | ((value & 0xF) << 28)
                       | ((value & 0xF0) >> 4)
                       | (value & 0xF0)
                       | (value & 0xF00)
                       | ((value & 0xF00) << 4)
                       | ((value & 0xF000) << 4)
                       | ((value & 0xF000) << 8)
                    ;
            }
            case 6: // RGB complete definition
            {
                return (defaultAlpha << 24) | value;
            }
            case 8: // RGBA complete definition
            {
                // BUGFIX: 'value >> 8' is an arithmetic shift on int and sign-extends
                // whenever R >= 0x80, corrupting the alpha byte. Shift as unsigned.
                return ((value & 0xFF) << 24) | (int)((uint)value >> 8);
            }
            default:
                throw new FormatException("Invalid HTML Color");
        }
    }
}

View File

@@ -0,0 +1,52 @@
using System;
using System.Collections.Generic;
namespace EonaCat.LogStack.Helpers;
// This file is part of the EonaCat project(s) which is released under the Apache License.
// See the LICENSE file or go to https://EonaCat.com/License for full license details.
/// <summary>
/// Cached name/value lookup tables for an enum type, with optional case-insensitive parsing.
/// </summary>
internal static class EnumHelper<T>
    where T : struct
{
    static EnumHelper()
    {
        var names = Enum.GetNames(typeof(T));
        var values = (T[])Enum.GetValues(typeof(T));
        Names = new Dictionary<T, string>(names.Length);
        // Twice the capacity: each name is stored both verbatim and lowercased.
        Values = new Dictionary<string, T>(names.Length * 2);
        for (var i = 0; i < names.Length; i++)
        {
            Names[values[i]] = names[i];
            Values[names[i]] = values[i];
            // BUGFIX: use the invariant culture for the case-insensitive key so lookups
            // are not broken under culture-specific casing rules (e.g. Turkish dotless I).
            Values[names[i].ToLowerInvariant()] = values[i];
        }
    }

    /// <summary>Enum value → declared name.</summary>
    public static Dictionary<T, string> Names { get; }

    /// <summary>Declared name (exact and lowercase) → enum value.</summary>
    public static Dictionary<string, T> Values { get; }

    /// <summary>
    /// Returns the declared name for <paramref name="value"/>, or its numeric value as a
    /// string when it is not a declared member.
    /// </summary>
    public static string ToString(T value)
    {
        return Names.TryGetValue(value, out var result) ? result : Convert.ToInt64(value).ToString();
    }

    /// <summary>
    /// Tries to resolve <paramref name="input"/> to an enum value; optionally case-insensitive.
    /// Null or empty input yields false.
    /// </summary>
    public static bool TryParse(string input, bool ignoreCase, out T value)
    {
        if (string.IsNullOrEmpty(input))
        {
            value = default;
            return false;
        }
        // BUGFIX: invariant lowercasing to match the keys built in the static constructor.
        return Values.TryGetValue(ignoreCase ? input.ToLowerInvariant() : input, out value);
    }

    /// <summary>Parses <paramref name="input"/>, returning <paramref name="defaultValue"/> on failure.</summary>
    internal static T Parse(string input, bool ignoreCase, T defaultValue)
    {
        return TryParse(input, ignoreCase, out var result) ? result : defaultValue;
    }
}

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More