using Microsoft.Extensions.Logging;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Runtime.Caching;
using System.Threading;
using System.Threading.Tasks;
namespace MarketAlly.AIPlugin.Analysis.Infrastructure
{
/// <summary>
/// Performance optimization utilities including caching and parallel processing.
/// </summary>
public class PerformanceOptimization : IDisposable
{
private readonly MemoryCache _cache;
private readonly ILogger? _logger;
private readonly SemaphoreSlim _cacheLock = new(1, 1);
public PerformanceOptimization(ILogger? logger = null)
{
_logger = logger;
_cache = new MemoryCache("AnalysisCache");
}
/// <summary>
/// Executes operations in parallel with controlled concurrency.
/// </summary>
public async Task<IEnumerable<TResult>> ExecuteInParallelAsync<TInput, TResult>(
IEnumerable<TInput> inputs,
Func<TInput, Task<TResult>> operation,
int maxConcurrency = 0,
CancellationToken cancellationToken = default)
{
if (maxConcurrency <= 0)
maxConcurrency = Environment.ProcessorCount;
var semaphore = new SemaphoreSlim(maxConcurrency, maxConcurrency);
var results = new ConcurrentBag<TResult>();
var tasks = new List<Task>();
_logger?.LogDebug("Starting parallel execution with max concurrency: {MaxConcurrency}", maxConcurrency);
foreach (var input in inputs)
{
tasks.Add(ProcessItemAsync(input, operation, semaphore, results, cancellationToken));
}
await Task.WhenAll(tasks);
_logger?.LogDebug("Completed parallel execution of {TaskCount} tasks", tasks.Count);
return results;
}
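// Usage sketch (illustrative only; "optimizer", "documents", and AnalyzeDocumentAsync are
// hypothetical names, not part of this class):
//
//   var optimizer = new PerformanceOptimization(logger);
//   var results = await optimizer.ExecuteInParallelAsync(
//       documents,
//       doc => AnalyzeDocumentAsync(doc),
//       maxConcurrency: 4,
//       cancellationToken: token);
//
// Note: results are collected into a ConcurrentBag in completion order, not input order.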
/// <summary>
/// Gets a cached value, or creates and caches it via the supplied factory, with automatic expiration.
/// </summary>
public async Task<T> GetOrSetCacheAsync<T>(
string key,
Func<Task<T>> factory,
TimeSpan? expiration = null,
CancellationToken cancellationToken = default)
{
var actualExpiration = expiration ?? TimeSpan.FromMinutes(30);
// Try to get from cache first
if (_cache.Get(key) is T cachedValue)
{
_logger?.LogDebug("Cache hit for key: {CacheKey}", key);
return cachedValue;
}
await _cacheLock.WaitAsync(cancellationToken);
try
{
// Double-check after acquiring lock
if (_cache.Get(key) is T doubleCheckedValue)
{
_logger?.LogDebug("Cache hit after lock for key: {CacheKey}", key);
return doubleCheckedValue;
}
_logger?.LogDebug("Cache miss for key: {CacheKey}, executing factory", key);
var value = await factory();
var policy = new CacheItemPolicy
{
AbsoluteExpiration = DateTimeOffset.UtcNow.Add(actualExpiration),
Priority = CacheItemPriority.Default,
RemovedCallback = (args) =>
{
_logger?.LogDebug("Cache item removed: {CacheKey}, Reason: {Reason}",
args.CacheItem.Key, args.RemovedReason);
}
};
_cache.Set(key, value, policy);
_logger?.LogDebug("Cached value for key: {CacheKey} with expiration: {Expiration}",
key, actualExpiration);
return value;
}
finally
{
_cacheLock.Release();
}
}
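// Usage sketch (illustrative; "optimizer", "symbol", and LoadMarketDataAsync are hypothetical):
//
//   var data = await optimizer.GetOrSetCacheAsync(
//       $"market-data:{symbol}",
//       () => LoadMarketDataAsync(symbol),
//       TimeSpan.FromMinutes(10),
//       token);
//
// Because a single _cacheLock guards all factory executions, a slow factory for one key
// will also delay cache misses for unrelated keys.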
/// <summary>
/// Invalidates all cache entries whose keys contain the specified pattern.
/// </summary>
public async Task InvalidateCacheAsync(string keyPattern)
{
await _cacheLock.WaitAsync();
try
{
var keysToRemove = new List<string>();
foreach (var item in _cache)
{
if (item.Key.Contains(keyPattern))
{
keysToRemove.Add(item.Key);
}
}
foreach (var key in keysToRemove)
{
_cache.Remove(key);
_logger?.LogDebug("Removed cache key: {CacheKey}", key);
}
_logger?.LogInformation("Invalidated {Count} cache entries matching pattern: {Pattern}",
keysToRemove.Count, keyPattern);
}
finally
{
_cacheLock.Release();
}
}
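// Usage sketch (illustrative): drop every entry whose key contains a given substring,
// e.g. after fresh data arrives for a symbol:
//
//   await optimizer.InvalidateCacheAsync($"market-data:{symbol}");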
/// <summary>
/// Batches operations for more efficient processing.
/// </summary>
public async Task<IEnumerable<TResult>> ExecuteInBatchesAsync<TInput, TResult>(
IEnumerable<TInput> inputs,
Func<IEnumerable<TInput>, Task<IEnumerable<TResult>>> batchOperation,
int batchSize = 100,
CancellationToken cancellationToken = default)
{
var results = new List<TResult>();
var batch = new List<TInput>(batchSize);
_logger?.LogDebug("Starting batch processing with batch size: {BatchSize}", batchSize);
foreach (var input in inputs)
{
batch.Add(input);
if (batch.Count >= batchSize)
{
var batchResults = await batchOperation(batch);
results.AddRange(batchResults);
_logger?.LogDebug("Processed batch of {BatchSize} items", batch.Count);
batch.Clear();
cancellationToken.ThrowIfCancellationRequested();
}
}
// Process remaining items
if (batch.Count > 0)
{
var batchResults = await batchOperation(batch);
results.AddRange(batchResults);
_logger?.LogDebug("Processed final batch of {BatchSize} items", batch.Count);
}
_logger?.LogInformation("Completed batch processing of {TotalCount} items", results.Count);
return results;
}
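// Usage sketch (illustrative; BulkScoreAsync stands in for any API that accepts a batch):
//
//   var scores = await optimizer.ExecuteInBatchesAsync(
//       records,
//       batch => BulkScoreAsync(batch),
//       batchSize: 250,
//       cancellationToken: token);
//
// Batches run sequentially; if batch operations should overlap, compose this with
// ExecuteInParallelAsync.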
/// <summary>
/// Creates an object pool for expensive-to-create objects.
/// </summary>
public ObjectPool<T> CreateObjectPool<T>(
Func<T> factory,
Action<T>? resetAction = null,
int maxSize = 10) where T : class
{
return new ObjectPool<T>(factory, resetAction, maxSize, _logger);
}
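// Usage sketch (illustrative; pools a reusable StringBuilder-style buffer):
//
//   var pool = optimizer.CreateObjectPool(
//       factory: () => new StringBuilder(4096),
//       resetAction: sb => sb.Clear(),
//       maxSize: 20);
//
//   var sb = pool.Get();
//   try { /* build output */ } finally { pool.Return(sb); }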
/// <summary>
/// Creates a weak-reference cache so large objects can be reclaimed by the garbage collector when memory is needed.
/// </summary>
public WeakReferenceCache<T> CreateWeakReferenceCache<T>() where T : class
{
return new WeakReferenceCache<T>(_logger);
}
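// Usage sketch (illustrative; "Report" and "largeReport" are hypothetical):
//
//   var weakCache = optimizer.CreateWeakReferenceCache<Report>();
//   weakCache.Set("q3-report", largeReport);
//   var cached = weakCache.Get("q3-report"); // may be null once the GC has reclaimed it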
/// <summary>
/// Gets cache statistics for monitoring.
/// </summary>
public CacheStatistics GetCacheStatistics()
{
var stats = new CacheStatistics();
foreach (var item in _cache)
{
stats.TotalItems++;
if (item.Value != null)
{
stats.EstimatedSize += EstimateObjectSize(item.Value);
}
}
return stats;
}
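// Usage sketch (illustrative): periodically surface cache pressure to monitoring.
//
//   var stats = optimizer.GetCacheStatistics();
//   logger?.LogInformation("Cache holds {Items} items (~{Bytes} bytes)",
//       stats.TotalItems, stats.EstimatedSize);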
private async Task ProcessItemAsync<TInput, TResult>(
TInput input,
Func<TInput, Task<TResult>> operation,
SemaphoreSlim semaphore,
ConcurrentBag<TResult> results,
CancellationToken cancellationToken)
{
await semaphore.WaitAsync(cancellationToken);
try
{
var result = await operation(input);
results.Add(result);
}
finally
{
semaphore.Release();
}
}
private static long EstimateObjectSize(object obj)
{
// Simple size estimation - in practice, you might want to use more sophisticated methods
return obj switch
{
string str => str.Length * 2, // UTF-16 characters are 2 bytes each
byte[] bytes => bytes.Length,
_ => 64 // Default estimate for other objects
};
}
public void Dispose()
{
_cache?.Dispose();
_cacheLock?.Dispose();
}
}
/// <summary>
/// Object pool for managing expensive-to-create objects.
/// </summary>
public class ObjectPool<T> where T : class
{
private readonly ConcurrentQueue<T> _objects = new();
private readonly Func<T> _factory;
private readonly Action<T>? _resetAction;
private readonly int _maxSize;
private readonly ILogger? _logger;
private int _currentSize;
public ObjectPool(Func<T> factory, Action<T>? resetAction, int maxSize, ILogger? logger)
{
_factory = factory ?? throw new ArgumentNullException(nameof(factory));
_resetAction = resetAction;
_maxSize = maxSize;
_logger = logger;
}
public T Get()
{
if (_objects.TryDequeue(out var obj))
{
Interlocked.Decrement(ref _currentSize);
_logger?.LogDebug("Retrieved object from pool, current size: {CurrentSize}", _currentSize);
return obj;
}
_logger?.LogDebug("Creating new object, pool was empty");
return _factory();
}
public void Return(T obj)
{
if (obj == null) return;
if (_currentSize < _maxSize)
{
_resetAction?.Invoke(obj);
_objects.Enqueue(obj);
Interlocked.Increment(ref _currentSize);
_logger?.LogDebug("Returned object to pool, current size: {CurrentSize}", _currentSize);
}
else
{
_logger?.LogDebug("Pool is full, discarding object");
}
}
public int Count => _currentSize;
}
/// <summary>
/// Weak reference cache for memory-efficient caching of large objects.
/// </summary>
public class WeakReferenceCache<T> where T : class
{
private readonly ConcurrentDictionary<string, WeakReference> _cache = new();
private readonly ILogger? _logger;
public WeakReferenceCache(ILogger? logger)
{
_logger = logger;
}
public void Set(string key, T value)
{
_cache[key] = new WeakReference(value);
_logger?.LogDebug("Added weak reference for key: {Key}", key);
}
public T? Get(string key)
{
if (!_cache.TryGetValue(key, out var weakRef))
{
return null;
}
if (weakRef.Target is T value)
{
_logger?.LogDebug("Weak reference cache hit for key: {Key}", key);
return value;
}
// Clean up the dead reference so the dictionary does not grow unbounded
_cache.TryRemove(key, out _);
_logger?.LogDebug("Cleaned up dead weak reference for key: {Key}", key);
return null;
}
public void Remove(string key)
{
_cache.TryRemove(key, out _);
_logger?.LogDebug("Removed weak reference for key: {Key}", key);
}
public int Count => _cache.Count;
}
/// <summary>
/// Cache statistics for monitoring performance.
/// </summary>
public class CacheStatistics
{
public int TotalItems { get; set; }
public long EstimatedSize { get; set; }
public DateTime LastUpdated { get; set; } = DateTime.UtcNow;
}
}