MarketAlly.AIPlugin.Extensions/MarketAlly.AIPlugin.Refacto.../Performance/MemoryEfficientFileProcesso...

183 lines
6.7 KiB
C#
Executable File

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
namespace MarketAlly.AIPlugin.Refactoring.Performance
{
/// <summary>
/// Abstraction over process and GC memory metrics used to decide whether a
/// file should be read via the streaming path or loaded fully into memory.
/// </summary>
public interface IMemoryPressureMonitor
{
/// <summary>Returns true when <paramref name="filePath"/> should be processed with the streaming path.</summary>
Task<bool> ShouldUseStreamingAsync(string filePath);
/// <summary>Current memory usage of this process, in bytes.</summary>
long GetCurrentMemoryUsage();
/// <summary>Memory pressure as a ratio (0.0 = none, 1.0 = saturated).</summary>
double GetMemoryPressureRatio();
}
/// <summary>
/// Default <see cref="IMemoryPressureMonitor"/> backed by <see cref="Process"/>
/// (working set) and <see cref="GC.GetGCMemoryInfo()"/> (memory load).
/// </summary>
public class MemoryPressureMonitor : IMemoryPressureMonitor
{
    private const long StreamingThresholdBytes = 5 * 1024 * 1024; // 5MB
    private const double HighMemoryPressureThreshold = 0.8; // 80%

    /// <summary>
    /// Returns true when the file exceeds <see cref="StreamingThresholdBytes"/>
    /// or current memory pressure exceeds <see cref="HighMemoryPressureThreshold"/>.
    /// Missing files are treated as zero-length so the error surfaces later
    /// through the normal processing error path instead of throwing here.
    /// </summary>
    public Task<bool> ShouldUseStreamingAsync(string filePath)
    {
        // No awaits are needed; return a completed task instead of declaring
        // the method async (avoids the CS1998 state-machine overhead).
        var fileInfo = new FileInfo(filePath);
        var isLargeFile = fileInfo.Exists && fileInfo.Length > StreamingThresholdBytes;
        return Task.FromResult(isLargeFile || GetMemoryPressureRatio() > HighMemoryPressureThreshold);
    }

    /// <summary>Working-set size of the current process, in bytes.</summary>
    public long GetCurrentMemoryUsage()
    {
        using var process = Process.GetCurrentProcess();
        return process.WorkingSet64;
    }

    /// <summary>
    /// Memory pressure as memory load over total memory available to the GC,
    /// clamped to [0, 1]. Reads the GC's own view without forcing a collection
    /// (the previous implementation forced a full blocking GC on every call,
    /// which is itself a performance hazard, and its ratio was not meaningful).
    /// </summary>
    public double GetMemoryPressureRatio()
    {
        var info = GC.GetGCMemoryInfo();
        if (info.TotalAvailableMemoryBytes <= 0)
        {
            return 0.0; // Metric unavailable: report no pressure rather than divide by zero.
        }
        return Math.Clamp((double)info.MemoryLoadBytes / info.TotalAvailableMemoryBytes, 0.0, 1.0);
    }
}
/// <summary>
/// Outcome of processing a single source file: the parsed tree and raw text on
/// success, the error message on failure, plus timing/memory diagnostics.
/// </summary>
public class ProcessingResult
{
    /// <summary>True when the file was read and parsed without error.</summary>
    public bool Success { get; set; }

    /// <summary>Error message when <see cref="Success"/> is false; otherwise null.</summary>
    public string? Error { get; set; }

    /// <summary>Parsed syntax tree on success; otherwise null.</summary>
    public SyntaxTree? SyntaxTree { get; set; }

    /// <summary>Raw file text on success; otherwise null.</summary>
    public string? Content { get; set; }

    /// <summary>Wall-clock processing time in milliseconds.</summary>
    public long ProcessingTimeMs { get; set; }

    /// <summary>Approximate working-set growth during processing, in bytes.</summary>
    public long MemoryUsedBytes { get; set; }

    /// <summary>True when the streaming code path was used.</summary>
    public bool UsedStreaming { get; set; }
}
/// <summary>
/// Parses C# source files into syntax trees while bounding concurrency and
/// choosing between a buffered asynchronous read and a simple in-memory read
/// based on file size and memory pressure.
/// </summary>
public class MemoryEfficientFileProcessor
{
    private readonly IMemoryPressureMonitor _memoryMonitor;

    // Static so total parse concurrency is bounded by the core count across
    // every MemoryEfficientFileProcessor instance in the process.
    private static readonly SemaphoreSlim _concurrencyLimiter = new(Environment.ProcessorCount);

    /// <summary>
    /// Creates a processor; when <paramref name="memoryMonitor"/> is null a
    /// default <see cref="MemoryPressureMonitor"/> is used.
    /// </summary>
    public MemoryEfficientFileProcessor(IMemoryPressureMonitor? memoryMonitor = null)
    {
        _memoryMonitor = memoryMonitor ?? new MemoryPressureMonitor();
    }

    /// <summary>
    /// Processes one file, selecting streaming vs in-memory reading. Failures
    /// are reported through <see cref="ProcessingResult.Error"/>; cancellation
    /// propagates as <see cref="OperationCanceledException"/>.
    /// </summary>
    public async Task<ProcessingResult> ProcessLargeFileAsync(string filePath, CancellationToken cancellationToken = default)
    {
        await _concurrencyLimiter.WaitAsync(cancellationToken);
        try
        {
            var stopwatch = Stopwatch.StartNew();
            var initialMemory = _memoryMonitor.GetCurrentMemoryUsage();

            if (await _memoryMonitor.ShouldUseStreamingAsync(filePath))
            {
                return await ProcessFileStreamingAsync(filePath, stopwatch, initialMemory, cancellationToken);
            }
            return await ProcessFileInMemoryAsync(filePath, stopwatch, initialMemory, cancellationToken);
        }
        finally
        {
            _concurrencyLimiter.Release();
        }
    }

    /// <summary>
    /// Reads the file through a buffered asynchronous stream. The full text is
    /// still materialized because Roslyn's ParseText needs the complete source;
    /// true incremental parsing would require SourceText.From(stream).
    /// </summary>
    private async Task<ProcessingResult> ProcessFileStreamingAsync(
        string filePath,
        Stopwatch stopwatch,
        long initialMemory,
        CancellationToken cancellationToken)
    {
        try
        {
            using var fileStream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read,
                bufferSize: 64 * 1024, useAsync: true);
            using var reader = new StreamReader(fileStream);
            var content = await reader.ReadToEndAsync(cancellationToken);

            var syntaxTree = CSharpSyntaxTree.ParseText(content, path: filePath, cancellationToken: cancellationToken);

            stopwatch.Stop();
            var finalMemory = _memoryMonitor.GetCurrentMemoryUsage();
            return new ProcessingResult
            {
                Success = true,
                SyntaxTree = syntaxTree,
                Content = content,
                ProcessingTimeMs = stopwatch.ElapsedMilliseconds,
                MemoryUsedBytes = finalMemory - initialMemory,
                UsedStreaming = true
            };
        }
        catch (OperationCanceledException)
        {
            // Bug fix: cancellation must propagate to the caller, not be
            // reported as an ordinary processing failure.
            throw;
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            return new ProcessingResult
            {
                Success = false,
                Error = ex.Message,
                ProcessingTimeMs = stopwatch.ElapsedMilliseconds,
                UsedStreaming = true
            };
        }
    }

    /// <summary>Reads the whole file into memory in one call and parses it.</summary>
    private async Task<ProcessingResult> ProcessFileInMemoryAsync(
        string filePath,
        Stopwatch stopwatch,
        long initialMemory,
        CancellationToken cancellationToken)
    {
        try
        {
            var content = await File.ReadAllTextAsync(filePath, cancellationToken);
            var syntaxTree = CSharpSyntaxTree.ParseText(content, path: filePath, cancellationToken: cancellationToken);

            stopwatch.Stop();
            var finalMemory = _memoryMonitor.GetCurrentMemoryUsage();
            return new ProcessingResult
            {
                Success = true,
                SyntaxTree = syntaxTree,
                Content = content,
                ProcessingTimeMs = stopwatch.ElapsedMilliseconds,
                MemoryUsedBytes = finalMemory - initialMemory,
                UsedStreaming = false
            };
        }
        catch (OperationCanceledException)
        {
            // Bug fix: let cancellation bubble up instead of converting it
            // into a failed ProcessingResult.
            throw;
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            return new ProcessingResult
            {
                Success = false,
                Error = ex.Message,
                ProcessingTimeMs = stopwatch.ElapsedMilliseconds,
                UsedStreaming = false
            };
        }
    }

    /// <summary>
    /// Processes several files concurrently; the shared semaphore still caps
    /// the number of files in flight at the processor count.
    /// </summary>
    public async Task<ProcessingResult[]> ProcessMultipleFilesAsync(
        IEnumerable<string> filePaths,
        CancellationToken cancellationToken = default)
    {
        // Materialize so a caller-supplied lazy sequence is enumerated exactly once.
        var tasks = filePaths.Select(path => ProcessLargeFileAsync(path, cancellationToken)).ToArray();
        return await Task.WhenAll(tasks);
    }
}
}