MarketAlly.AIPlugin.Extensions/MarketAlly.AIPlugin.DevOps/PipelineOptimizerPlugin.cs


using MarketAlly.AIPlugin;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using YamlDotNet.Serialization;
using YamlDotNet.Serialization.NamingConventions;
namespace MarketAlly.AIPlugin.DevOps.Plugins
{
[AIPlugin("PipelineOptimizer", "Optimizes build and deployment pipeline efficiency and performance")]
public class PipelineOptimizerPlugin : IAIPlugin
{
private readonly ILogger<PipelineOptimizerPlugin> _logger;
private readonly IDeserializer _yamlDeserializer;
public PipelineOptimizerPlugin(ILogger<PipelineOptimizerPlugin> logger = null)
{
_logger = logger;
_yamlDeserializer = new DeserializerBuilder()
.WithNamingConvention(HyphenatedNamingConvention.Instance)
.IgnoreUnmatchedProperties()
.Build();
}
[AIParameter("Full path to the pipeline configuration", required: true)]
public string PipelineConfig { get; set; }
[AIParameter("Analyze build time optimization", required: false)]
public bool OptimizeBuildTime { get; set; } = true;
[AIParameter("Check for parallel execution opportunities", required: false)]
public bool CheckParallelization { get; set; } = true;
[AIParameter("Analyze resource utilization", required: false)]
public bool AnalyzeResources { get; set; } = true;
[AIParameter("Check for unnecessary steps", required: false)]
public bool CheckUnnecessarySteps { get; set; } = true;
[AIParameter("Generate optimized pipeline configuration", required: false)]
public bool GenerateOptimized { get; set; } = false;
public IReadOnlyDictionary<string, Type> SupportedParameters => new Dictionary<string, Type>
{
["pipelineConfig"] = typeof(string),
["optimizeBuildTime"] = typeof(bool),
["checkParallelization"] = typeof(bool),
["analyzeResources"] = typeof(bool),
["checkUnnecessarySteps"] = typeof(bool),
["generateOptimized"] = typeof(bool)
};
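/// <summary>
/// Runs the optimization analysis against the supplied pipeline configuration file.
/// A minimal usage sketch (hypothetical host code; the path and flag values are illustrative,
/// and the host normally builds this dictionary from SupportedParameters):
/// <code>
/// var plugin = new PipelineOptimizerPlugin();
/// var outcome = await plugin.ExecuteAsync(new Dictionary&lt;string, object&gt;
/// {
///     ["pipelineConfig"] = ".github/workflows/ci.yml", // hypothetical path
///     ["checkParallelization"] = true,
///     ["generateOptimized"] = true
/// });
/// </code>
/// </summary>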
public async Task<AIPluginResult> ExecuteAsync(IReadOnlyDictionary<string, object> parameters)
{
try
{
_logger?.LogInformation("PipelineOptimizer plugin executing");
// Extract parameters
string pipelineConfig = parameters["pipelineConfig"].ToString();
// Optional analysis flags default to true (matching the AIParameter defaults) when not supplied;
// generateOptimized stays opt-in.
bool optimizeBuildTime = !parameters.TryGetValue("optimizeBuildTime", out var buildObj) || Convert.ToBoolean(buildObj);
bool checkParallelization = !parameters.TryGetValue("checkParallelization", out var parallelObj) || Convert.ToBoolean(parallelObj);
bool analyzeResources = !parameters.TryGetValue("analyzeResources", out var resourceObj) || Convert.ToBoolean(resourceObj);
bool checkUnnecessarySteps = !parameters.TryGetValue("checkUnnecessarySteps", out var stepsObj) || Convert.ToBoolean(stepsObj);
bool generateOptimized = parameters.TryGetValue("generateOptimized", out var genObj) && Convert.ToBoolean(genObj);
// Validate pipeline file exists
if (!File.Exists(pipelineConfig))
{
return new AIPluginResult(
new FileNotFoundException($"Pipeline configuration not found: {pipelineConfig}"),
"Pipeline configuration not found"
);
}
// Parse pipeline configuration
var content = await File.ReadAllTextAsync(pipelineConfig);
var pipelineType = DetectPipelineType(pipelineConfig);
var pipelineData = await ParsePipelineAsync(content, pipelineType);
var optimizationResult = new PipelineOptimizationResult
{
PipelineConfig = pipelineConfig,
PipelineType = pipelineType,
OriginalStepCount = pipelineData.TotalSteps,
OriginalJobCount = pipelineData.TotalJobs
};
// Perform optimization analysis
if (optimizeBuildTime)
{
AnalyzeBuildTimeOptimizations(pipelineData, optimizationResult);
}
if (checkParallelization)
{
AnalyzeParallelizationOpportunities(pipelineData, optimizationResult);
}
if (analyzeResources)
{
AnalyzeResourceUtilization(pipelineData, optimizationResult);
}
if (checkUnnecessarySteps)
{
AnalyzeUnnecessarySteps(pipelineData, optimizationResult);
}
// Generate optimized configuration if requested
string optimizedConfig = null;
if (generateOptimized)
{
optimizedConfig = GenerateOptimizedPipeline(pipelineData, optimizationResult);
}
// Calculate performance metrics
var performanceMetrics = CalculatePerformanceMetrics(pipelineData, optimizationResult);
var result = new
{
Message = "Pipeline optimization completed",
PipelineConfig = pipelineConfig,
PipelineType = pipelineType,
OriginalMetrics = new
{
JobCount = optimizationResult.OriginalJobCount,
StepCount = optimizationResult.OriginalStepCount,
EstimatedBuildTime = performanceMetrics.EstimatedOriginalBuildTime,
ParallelJobs = performanceMetrics.CurrentParallelJobs
},
BuildTimeOptimizations = optimizationResult.BuildTimeOptimizations,
ParallelizationOpportunities = optimizationResult.ParallelizationOpportunities,
ResourceOptimizations = optimizationResult.ResourceOptimizations,
UnnecessarySteps = optimizationResult.UnnecessarySteps,
OptimizedConfig = optimizedConfig,
PerformanceMetrics = performanceMetrics,
Summary = new
{
TotalOptimizations = optimizationResult.BuildTimeOptimizations.Count +
optimizationResult.ParallelizationOpportunities.Count +
optimizationResult.ResourceOptimizations.Count +
optimizationResult.UnnecessarySteps.Count,
EstimatedTimeSaving = performanceMetrics.EstimatedTimeSaving,
EstimatedCostSaving = performanceMetrics.EstimatedCostSaving,
OptimizationScore = CalculateOptimizationScore(optimizationResult)
}
};
_logger?.LogInformation("Pipeline optimization completed. Found {TotalOptimizations} optimization opportunities with {TimeSaving} estimated time saving",
result.Summary.TotalOptimizations, result.Summary.EstimatedTimeSaving);
return new AIPluginResult(result);
}
catch (Exception ex)
{
_logger?.LogError(ex, "Failed to optimize pipeline");
return new AIPluginResult(ex, "Failed to optimize pipeline");
}
}
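/// <summary>
/// Detects the CI/CD platform purely from the file's location and name: anything under
/// .github/workflows is treated as GitHub Actions, azure-pipelines* as Azure DevOps,
/// .gitlab-ci.yml as GitLab CI, and Jenkinsfile as Jenkins; everything else falls back
/// to the generic parser.
/// </summary>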
private string DetectPipelineType(string filePath)
{
var fileName = Path.GetFileName(filePath);
// Normalize separators so the .github/workflows check also matches Windows-style paths
var directory = Path.GetDirectoryName(filePath)?.Replace('\\', '/');
if (directory?.Contains(".github/workflows") == true) return "github";
if (fileName.StartsWith("azure-pipelines", StringComparison.OrdinalIgnoreCase)) return "azure";
if (fileName.Equals(".gitlab-ci.yml", StringComparison.OrdinalIgnoreCase)) return "gitlab";
if (fileName.Equals("Jenkinsfile", StringComparison.OrdinalIgnoreCase)) return "jenkins";
return "unknown";
}
private async Task<PipelineData> ParsePipelineAsync(string content, string pipelineType)
{
var pipelineData = new PipelineData { PipelineType = pipelineType };
try
{
switch (pipelineType)
{
case "github":
await ParseGitHubActionsAsync(content, pipelineData);
break;
case "azure":
await ParseAzureDevOpsAsync(content, pipelineData);
break;
case "gitlab":
await ParseGitLabCIAsync(content, pipelineData);
break;
case "jenkins":
await ParseJenkinsAsync(content, pipelineData);
break;
default:
await ParseGenericPipelineAsync(content, pipelineData);
break;
}
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to parse pipeline configuration");
pipelineData.ParseErrors.Add($"Failed to parse pipeline: {ex.Message}");
}
return pipelineData;
}
private async Task ParseGitHubActionsAsync(string content, PipelineData pipelineData)
{
var workflow = _yamlDeserializer.Deserialize<GitHubWorkflow>(content);
if (workflow?.Jobs != null)
{
foreach (var jobKvp in workflow.Jobs)
{
var job = new PipelineJob
{
Name = jobKvp.Value.Name ?? jobKvp.Key,
Id = jobKvp.Key,
RunsOn = jobKvp.Value.RunsOn,
TimeoutMinutes = jobKvp.Value.TimeoutMinutes,
Dependencies = jobKvp.Value.Needs ?? new List<string>(),
Strategy = jobKvp.Value.Strategy?.Matrix != null ? "matrix" : "single"
};
if (jobKvp.Value.Steps != null)
{
foreach (var step in jobKvp.Value.Steps)
{
var pipelineStep = new PipelineStep
{
Name = step.Name ?? "Unnamed Step",
Type = !string.IsNullOrEmpty(step.Uses) ? "action" : "script",
Action = step.Uses,
Script = step.Run,
EstimatedDuration = EstimateStepDuration(step)
};
job.Steps.Add(pipelineStep);
}
}
pipelineData.Jobs.Add(job);
}
}
pipelineData.TotalJobs = pipelineData.Jobs.Count;
pipelineData.TotalSteps = pipelineData.Jobs.Sum(j => j.Steps.Count);
}
private async Task ParseAzureDevOpsAsync(string content, PipelineData pipelineData)
{
// Basic Azure DevOps pipeline parsing
var lines = content.Split('\n');
var currentJob = new PipelineJob { Name = "default", Id = "default" };
foreach (var line in lines)
{
var trimmedLine = line.Trim();
if (trimmedLine.StartsWith("- task:") || trimmedLine.StartsWith("- script:"))
{
var step = new PipelineStep
{
Name = ExtractTaskName(trimmedLine),
Type = trimmedLine.Contains("task:") ? "task" : "script",
EstimatedDuration = 60 // Default estimate
};
currentJob.Steps.Add(step);
}
}
if (currentJob.Steps.Any())
{
pipelineData.Jobs.Add(currentJob);
}
pipelineData.TotalJobs = pipelineData.Jobs.Count;
pipelineData.TotalSteps = pipelineData.Jobs.Sum(j => j.Steps.Count);
}
private async Task ParseGitLabCIAsync(string content, PipelineData pipelineData)
{
// Basic GitLab CI parsing
var yaml = _yamlDeserializer.Deserialize<Dictionary<string, object>>(content);
foreach (var kvp in yaml)
{
if (kvp.Key.StartsWith(".") || kvp.Key == "stages" || kvp.Key == "variables")
continue;
var job = new PipelineJob
{
Name = kvp.Key,
Id = kvp.Key
};
// Basic step estimation for GitLab CI
// YamlDotNet materializes nested YAML mappings as Dictionary<object, object>
if (kvp.Value is Dictionary<object, object> jobData)
{
if (jobData.ContainsKey("script"))
{
var scriptStep = new PipelineStep
{
Name = "Execute Script",
Type = "script",
EstimatedDuration = 120
};
job.Steps.Add(scriptStep);
}
}
pipelineData.Jobs.Add(job);
}
pipelineData.TotalJobs = pipelineData.Jobs.Count;
pipelineData.TotalSteps = pipelineData.Jobs.Sum(j => j.Steps.Count);
}
private async Task ParseJenkinsAsync(string content, PipelineData pipelineData)
{
// Basic Jenkins pipeline parsing (simplified)
var stageMatches = Regex.Matches(content, @"stage\s*\(\s*['""]([^'""]+)['""]", RegexOptions.IgnoreCase);
foreach (Match match in stageMatches)
{
var job = new PipelineJob
{
Name = match.Groups[1].Value,
Id = match.Groups[1].Value.Replace(" ", "_")
};
// Add a default step for each stage
job.Steps.Add(new PipelineStep
{
Name = "Stage Execution",
Type = "script",
EstimatedDuration = 180
});
pipelineData.Jobs.Add(job);
}
pipelineData.TotalJobs = pipelineData.Jobs.Count;
pipelineData.TotalSteps = pipelineData.Jobs.Sum(j => j.Steps.Count);
}
private async Task ParseGenericPipelineAsync(string content, PipelineData pipelineData)
{
// Generic parsing for unknown pipeline types
var lines = content.Split('\n');
var stepCount = 0;
foreach (var line in lines)
{
if (line.Trim().StartsWith("run:") || line.Trim().StartsWith("script:"))
{
stepCount++;
}
}
if (stepCount > 0)
{
var job = new PipelineJob { Name = "Generic Job", Id = "generic" };
for (int i = 0; i < stepCount; i++)
{
job.Steps.Add(new PipelineStep
{
Name = $"Step {i + 1}",
Type = "script",
EstimatedDuration = 60
});
}
pipelineData.Jobs.Add(job);
}
pipelineData.TotalJobs = pipelineData.Jobs.Count;
pipelineData.TotalSteps = stepCount;
}
private string ExtractTaskName(string line)
{
var match = Regex.Match(line, @"(?:task|script):\s*(.+)");
return match.Success ? match.Groups[1].Value.Trim() : "Unknown Task";
}
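/// <summary>
/// Returns a rough per-step duration estimate in seconds. The numbers are heuristics, not
/// measurements: well-known actions (checkout, setup-*, cache, artifact transfer) get fixed
/// estimates, script steps are bucketed by keyword (install, test, build, deploy), and
/// anything unrecognized defaults to 60 seconds.
/// </summary>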
private int EstimateStepDuration(GitHubStep step)
{
if (!string.IsNullOrEmpty(step.Uses))
{
// Common action duration estimates (in seconds)
return step.Uses.ToLower() switch
{
var action when action.Contains("checkout") => 30,
var action when action.Contains("setup-node") => 60,
var action when action.Contains("setup-dotnet") => 90,
var action when action.Contains("cache") => 45,
var action when action.Contains("upload-artifact") => 120,
var action when action.Contains("download-artifact") => 60,
_ => 90
};
}
if (!string.IsNullOrEmpty(step.Run))
{
var script = step.Run.ToLower();
if (script.Contains("npm install") || script.Contains("yarn install")) return 300;
if (script.Contains("npm test") || script.Contains("yarn test")) return 240;
if (script.Contains("build") || script.Contains("compile")) return 480;
if (script.Contains("deploy")) return 180;
return 60;
}
return 60; // Default estimate
}
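/// <summary>
/// Flags build-time improvements using four keyword-based heuristics: missing dependency
/// caching, missing incremental-build configuration, repeated build/compile steps within a
/// single job (artifact reuse), and long-running jobs (over 30 minutes of estimated work)
/// left on the default runner size.
/// </summary>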
private void AnalyzeBuildTimeOptimizations(PipelineData pipelineData, PipelineOptimizationResult result)
{
// Check for caching opportunities
var hasCaching = pipelineData.Jobs.Any(j =>
j.Steps.Any(s => s.Action?.Contains("cache") == true || s.Script?.Contains("cache") == true));
if (!hasCaching)
{
result.BuildTimeOptimizations.Add(new BuildTimeOptimization
{
Type = "Caching",
Description = "No caching mechanism detected",
Recommendation = "Implement dependency caching to reduce download times",
EstimatedTimeSaving = "30-60% reduction in dependency installation time",
Implementation = "Add caching steps for package managers (npm, NuGet, Maven, etc.)"
});
}
// Check for incremental builds
var hasIncrementalBuild = pipelineData.Jobs.Any(j =>
j.Steps.Any(s => s.Script?.Contains("incremental") == true));
if (!hasIncrementalBuild)
{
result.BuildTimeOptimizations.Add(new BuildTimeOptimization
{
Type = "Incremental Build",
Description = "No incremental build configuration detected",
Recommendation = "Configure incremental builds to only rebuild changed components",
EstimatedTimeSaving = "40-70% reduction in build time for small changes",
Implementation = "Enable incremental compilation and change detection"
});
}
// Check for build artifact reuse
foreach (var job in pipelineData.Jobs)
{
var buildSteps = job.Steps.Where(s =>
s.Script?.Contains("build") == true || s.Script?.Contains("compile") == true).ToList();
if (buildSteps.Count > 1)
{
result.BuildTimeOptimizations.Add(new BuildTimeOptimization
{
Type = "Artifact Reuse",
Description = $"Multiple build steps detected in job '{job.Name}'",
Recommendation = "Build once and reuse artifacts across subsequent steps",
EstimatedTimeSaving = "20-40% reduction in redundant build time",
Implementation = "Use build artifacts and upload/download actions"
});
}
}
// Check for resource sizing
foreach (var job in pipelineData.Jobs)
{
if (string.IsNullOrEmpty(job.RunsOn) || job.RunsOn == "ubuntu-latest")
{
var estimatedDuration = job.Steps.Sum(s => s.EstimatedDuration);
if (estimatedDuration > 1800) // 30 minutes
{
result.BuildTimeOptimizations.Add(new BuildTimeOptimization
{
Type = "Resource Sizing",
Description = $"Long-running job '{job.Name}' using default runner",
Recommendation = "Consider using larger runner instances for compute-intensive jobs",
EstimatedTimeSaving = "25-50% reduction in execution time",
Implementation = "Use ubuntu-latest-4-cores or self-hosted runners with more resources"
});
}
}
}
}
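/// <summary>
/// Looks for three kinds of concurrency wins: independent jobs (no declared dependencies)
/// that could be fanned out, test-heavy jobs that could adopt a matrix strategy, and runs of
/// adjacent test-only steps inside a job that could be split into parallel jobs. For example,
/// three equally sized independent jobs run sequentially take 3T; run in parallel they take T,
/// which is the (3 - 1) * 100 / 3 = 66% saving reported below.
/// </summary>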
private void AnalyzeParallelizationOpportunities(PipelineData pipelineData, PipelineOptimizationResult result)
{
// Analyze job dependencies
var independentJobs = pipelineData.Jobs.Where(j => !j.Dependencies.Any()).ToList();
if (independentJobs.Count > 1)
{
var currentlySequential = independentJobs.Count(j => j.Strategy != "matrix");
if (currentlySequential > 1)
{
result.ParallelizationOpportunities.Add(new ParallelizationOpportunity
{
Type = "Job Parallelization",
Description = $"{currentlySequential} independent jobs that could run in parallel",
JobsAffected = independentJobs.Select(j => j.Name).ToList(),
Recommendation = "Configure these jobs to run in parallel",
EstimatedTimeSaving = $"Reduce total pipeline time by {(currentlySequential - 1) * 100 / currentlySequential}%"
});
}
}
// Check for matrix strategy opportunities
foreach (var job in pipelineData.Jobs)
{
if (job.Strategy != "matrix")
{
var hasTestSteps = job.Steps.Any(s =>
s.Script?.Contains("test") == true || s.Name.ToLower().Contains("test"));
if (hasTestSteps)
{
result.ParallelizationOpportunities.Add(new ParallelizationOpportunity
{
Type = "Matrix Strategy",
Description = $"Job '{job.Name}' with tests could benefit from matrix builds",
JobsAffected = new List<string> { job.Name },
Recommendation = "Implement matrix strategy for multiple environments/versions",
EstimatedTimeSaving = "Run tests across multiple environments in parallel"
});
}
}
}
// Analyze step-level parallelization within jobs
foreach (var job in pipelineData.Jobs)
{
var independentSteps = new List<List<PipelineStep>>();
var currentGroup = new List<PipelineStep>();
foreach (var step in job.Steps)
{
// Simple heuristic: steps that don't build/compile can often be parallelized
if (step.Script?.Contains("test") == true && !step.Script.Contains("build"))
{
currentGroup.Add(step);
}
else
{
if (currentGroup.Count > 1)
{
independentSteps.Add(new List<PipelineStep>(currentGroup));
}
currentGroup.Clear();
}
}
if (currentGroup.Count > 1)
{
independentSteps.Add(currentGroup);
}
foreach (var group in independentSteps)
{
result.ParallelizationOpportunities.Add(new ParallelizationOpportunity
{
Type = "Step Parallelization",
Description = $"Job '{job.Name}' has {group.Count} steps that could run in parallel",
JobsAffected = new List<string> { job.Name },
Recommendation = "Split independent steps into separate parallel jobs",
EstimatedTimeSaving = $"Reduce job time by up to {group.Count * 100 / job.Steps.Count}%"
});
}
}
}
private void AnalyzeResourceUtilization(PipelineData pipelineData, PipelineOptimizationResult result)
{
// Analyze runner utilization
var runnerUsage = pipelineData.Jobs
.GroupBy(j => j.RunsOn ?? "default")
.ToDictionary(g => g.Key, g => g.ToList());
foreach (var runnerGroup in runnerUsage)
{
var jobs = runnerGroup.Value;
var totalDuration = jobs.Sum(j => j.Steps.Sum(s => s.EstimatedDuration));
if (totalDuration > 3600 && jobs.Count == 1) // 1 hour on single job
{
result.ResourceOptimizations.Add(new ResourceOptimization
{
Type = "Runner Efficiency",
Description = $"Long-running single job on {runnerGroup.Key}",
Recommendation = "Consider breaking down into smaller, parallel jobs",
ResourceType = runnerGroup.Key,
EstimatedCostSaving = "Reduce runner minutes through parallelization"
});
}
if (jobs.Count > 1 && jobs.All(j => !j.Dependencies.Any()))
{
result.ResourceOptimizations.Add(new ResourceOptimization
{
Type = "Resource Allocation",
Description = $"Multiple independent jobs on {runnerGroup.Key}",
Recommendation = "These jobs can run in parallel to utilize resources better",
ResourceType = runnerGroup.Key,
EstimatedCostSaving = "Reduce total pipeline execution time"
});
}
}
// Check for over-provisioned resources
foreach (var job in pipelineData.Jobs)
{
var hasSimpleSteps = job.Steps.All(s =>
s.EstimatedDuration < 300 && // Less than 5 minutes
s.Script?.Contains("build") != true &&
s.Script?.Contains("compile") != true);
if (hasSimpleSteps && (job.RunsOn?.Contains("large") == true || job.RunsOn?.Contains("4-core") == true))
{
result.ResourceOptimizations.Add(new ResourceOptimization
{
Type = "Resource Right-sizing",
Description = $"Job '{job.Name}' may be over-provisioned",
Recommendation = "Use standard runners for lightweight operations",
ResourceType = job.RunsOn,
EstimatedCostSaving = "30-50% cost reduction by using appropriate runner size"
});
}
}
}
private void AnalyzeUnnecessarySteps(PipelineData pipelineData, PipelineOptimizationResult result)
{
foreach (var job in pipelineData.Jobs)
{
// Check for redundant checkout steps
var checkoutSteps = job.Steps.Where(s =>
s.Action?.Contains("checkout") == true).ToList();
if (checkoutSteps.Count > 1)
{
result.UnnecessarySteps.Add(new UnnecessaryStep
{
Type = "Redundant Checkout",
Description = $"Job '{job.Name}' has {checkoutSteps.Count} checkout steps",
JobName = job.Name,
StepNames = checkoutSteps.Select(s => s.Name).ToList(),
Recommendation = "Remove redundant checkout steps, code is available throughout the job",
TimeSaved = (checkoutSteps.Count - 1) * 30
});
}
// Check for redundant setup steps
var setupSteps = job.Steps.Where(s =>
s.Action?.Contains("setup-") == true).ToList();
var setupTypes = setupSteps.GroupBy(s =>
s.Action?.Split('/').LastOrDefault()?.Split('@').FirstOrDefault())
.Where(g => g.Count() > 1)
.ToList();
foreach (var setupGroup in setupTypes)
{
result.UnnecessarySteps.Add(new UnnecessaryStep
{
Type = "Redundant Setup",
Description = $"Multiple setup steps for {setupGroup.Key}",
JobName = job.Name,
StepNames = setupGroup.Select(s => s.Name).ToList(),
Recommendation = "Consolidate setup steps or move to job preparation",
TimeSaved = (setupGroup.Count() - 1) * 60
});
}
// Check for empty or no-op steps
var emptySteps = job.Steps.Where(s =>
string.IsNullOrWhiteSpace(s.Script) &&
string.IsNullOrWhiteSpace(s.Action) &&
s.Name.ToLower().Contains("placeholder")).ToList();
if (emptySteps.Any())
{
result.UnnecessarySteps.Add(new UnnecessaryStep
{
Type = "Empty Steps",
Description = "Found placeholder or empty steps",
JobName = job.Name,
StepNames = emptySteps.Select(s => s.Name).ToList(),
Recommendation = "Remove placeholder steps",
TimeSaved = emptySteps.Count * 10
});
}
// Check for debug/development steps in production pipelines
var debugSteps = job.Steps.Where(s =>
s.Script?.Contains("echo") == true ||
s.Script?.Contains("debug") == true ||
s.Name.ToLower().Contains("debug")).ToList();
if (debugSteps.Any())
{
result.UnnecessarySteps.Add(new UnnecessaryStep
{
Type = "Debug Steps",
Description = "Found debug/echo steps that may not be needed in production",
JobName = job.Name,
StepNames = debugSteps.Select(s => s.Name).ToList(),
Recommendation = "Remove debug steps or make them conditional",
TimeSaved = debugSteps.Count * 5
});
}
}
}
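/// <summary>
/// Emits a YAML sketch of an optimized pipeline. Only GitHub Actions gets a structured
/// rewrite (triggers, least-privilege permissions, per-job output via GenerateOptimizedGitHubJob);
/// other platforms receive a commented summary of the recommendations to adapt manually.
/// The output is a starting point, not a drop-in replacement for the original configuration.
/// </summary>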
private string GenerateOptimizedPipeline(PipelineData pipelineData, PipelineOptimizationResult result)
{
var optimized = new StringBuilder();
optimized.AppendLine("# Optimized Pipeline Configuration");
optimized.AppendLine("# Generated by MarketAlly.AIPlugin.DevOps");
optimized.AppendLine($"# Original pipeline type: {pipelineData.PipelineType}");
optimized.AppendLine($"# Optimization date: {DateTime.UtcNow:yyyy-MM-dd HH:mm:ss} UTC");
optimized.AppendLine();
if (pipelineData.PipelineType == "github")
{
optimized.AppendLine("name: Optimized CI/CD Pipeline");
optimized.AppendLine();
optimized.AppendLine("on:");
optimized.AppendLine(" push:");
optimized.AppendLine(" branches: [ main, develop ]");
optimized.AppendLine(" pull_request:");
optimized.AppendLine(" branches: [ main ]");
optimized.AppendLine();
// Add optimized permissions
optimized.AppendLine("permissions:");
optimized.AppendLine(" contents: read");
optimized.AppendLine(" checks: write");
optimized.AppendLine();
optimized.AppendLine("jobs:");
// Group independent jobs
var independentJobs = pipelineData.Jobs.Where(j => !j.Dependencies.Any()).ToList();
var dependentJobs = pipelineData.Jobs.Where(j => j.Dependencies.Any()).ToList();
foreach (var job in independentJobs)
{
GenerateOptimizedGitHubJob(job, optimized, result);
}
foreach (var job in dependentJobs)
{
GenerateOptimizedGitHubJob(job, optimized, result);
}
}
else
{
optimized.AppendLine("# Generic optimized pipeline structure");
optimized.AppendLine("# Please adapt to your specific CI/CD platform");
optimized.AppendLine();
foreach (var optimization in result.BuildTimeOptimizations)
{
optimized.AppendLine($"# Optimization: {optimization.Type}");
optimized.AppendLine($"# {optimization.Recommendation}");
optimized.AppendLine();
}
}
return optimized.ToString();
}
private void GenerateOptimizedGitHubJob(PipelineJob job, StringBuilder optimized, PipelineOptimizationResult result)
{
optimized.AppendLine($" {job.Id}:");
optimized.AppendLine($" name: {job.Name}");
optimized.AppendLine($" runs-on: {job.RunsOn ?? "ubuntu-latest"}");
if (job.TimeoutMinutes.HasValue)
{
optimized.AppendLine($" timeout-minutes: {job.TimeoutMinutes}");
}
else
{
optimized.AppendLine(" timeout-minutes: 30"); // Add default timeout
}
if (job.Dependencies.Any())
{
optimized.AppendLine($" needs: [{string.Join(", ", job.Dependencies)}]");
}
// Add matrix strategy if recommended
var matrixOpportunity = result.ParallelizationOpportunities
.FirstOrDefault(p => p.Type == "Matrix Strategy" && p.JobsAffected.Contains(job.Name));
if (matrixOpportunity != null)
{
optimized.AppendLine(" strategy:");
optimized.AppendLine(" matrix:");
optimized.AppendLine(" node-version: [16, 18, 20]");
optimized.AppendLine(" # Add other matrix dimensions as needed");
}
optimized.AppendLine(" steps:");
// Filter out unnecessary steps
var unnecessaryStepNames = result.UnnecessarySteps
.Where(u => u.JobName == job.Name)
.SelectMany(u => u.StepNames)
.ToHashSet();
var filteredSteps = job.Steps.Where(s => !unnecessaryStepNames.Contains(s.Name)).ToList();
// Add caching if recommended
var cachingOptimization = result.BuildTimeOptimizations
.FirstOrDefault(b => b.Type == "Caching");
if (cachingOptimization != null)
{
optimized.AppendLine(" - name: Cache dependencies");
optimized.AppendLine(" uses: actions/cache@v3.3.2");
optimized.AppendLine(" with:");
optimized.AppendLine(" path: ~/.npm");
optimized.AppendLine(" key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}");
optimized.AppendLine(" restore-keys: |");
optimized.AppendLine(" ${{ runner.os }}-node-");
optimized.AppendLine();
}
foreach (var step in filteredSteps)
{
optimized.AppendLine($" - name: {step.Name}");
if (!string.IsNullOrEmpty(step.Action))
{
optimized.AppendLine($" uses: {step.Action}");
}
if (!string.IsNullOrEmpty(step.Script))
{
optimized.AppendLine($" run: {step.Script}");
}
optimized.AppendLine();
}
optimized.AppendLine();
}
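/// <summary>
/// Estimates timing and cost figures from the step-duration heuristics: original build time is
/// the longest independent job plus the sum of dependent jobs, savings assume roughly 40% from
/// caching and 50% from job parallelization plus the seconds attributed to removable steps, and
/// cost uses an approximate $0.008 per runner minute. These are order-of-magnitude estimates,
/// not measured values.
/// </summary>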
private PerformanceMetrics CalculatePerformanceMetrics(PipelineData pipelineData, PipelineOptimizationResult result)
{
var metrics = new PerformanceMetrics();
// Calculate original build time
var independentJobs = pipelineData.Jobs.Where(j => !j.Dependencies.Any()).ToList();
var dependentJobs = pipelineData.Jobs.Where(j => j.Dependencies.Any()).ToList();
// Parallel execution time is the max of independent jobs
var independentJobsTime = independentJobs.Any()
? independentJobs.Max(j => j.Steps.Sum(s => s.EstimatedDuration))
: 0;
// Sequential time for dependent jobs
var dependentJobsTime = dependentJobs.Sum(j => j.Steps.Sum(s => s.EstimatedDuration));
metrics.EstimatedOriginalBuildTime = $"{(independentJobsTime + dependentJobsTime) / 60.0:F1} minutes"; // divide by 60.0 to keep fractional minutes
metrics.CurrentParallelJobs = independentJobs.Count;
// Calculate potential time savings
var cachingSavings = result.BuildTimeOptimizations
.Where(b => b.Type == "Caching")
.Sum(b => independentJobsTime * 0.4); // 40% average savings from caching
var parallelizationSavings = result.ParallelizationOpportunities
.Where(p => p.Type == "Job Parallelization")
.Sum(p => dependentJobsTime * 0.5); // 50% savings from parallelization
var unnecessaryStepsSavings = result.UnnecessarySteps
.Sum(u => u.TimeSaved);
var totalTimeSavings = cachingSavings + parallelizationSavings + unnecessaryStepsSavings;
metrics.EstimatedTimeSaving = $"{totalTimeSavings / 60:F1} minutes";
// Estimate cost savings (rough calculation based on GitHub Actions pricing)
var costPerMinute = 0.008; // Approximate cost per minute for standard runners
var estimatedCostSaving = totalTimeSavings * costPerMinute;
metrics.EstimatedCostSaving = $"${estimatedCostSaving:F2} per run";
// Calculate optimized build time
var optimizedBuildTime = (independentJobsTime + dependentJobsTime) - totalTimeSavings;
metrics.EstimatedOptimizedBuildTime = $"{optimizedBuildTime / 60:F1} minutes";
// Performance improvement percentage
if (independentJobsTime + dependentJobsTime > 0)
{
var improvementPercentage = (totalTimeSavings / (independentJobsTime + dependentJobsTime)) * 100;
metrics.PerformanceImprovement = $"{improvementPercentage:F1}%";
}
return metrics;
}
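/// <summary>
/// Scores the pipeline out of 100 by deducting points per finding (5 for build-time, 8 for
/// parallelization, 6 for resource, 3 for unnecessary-step), floored at 0. For example, two
/// build-time findings, one parallelization finding and one unnecessary step score
/// 100 - 10 - 8 - 3 = 79.
/// </summary>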
private int CalculateOptimizationScore(PipelineOptimizationResult result)
{
var score = 100;
// Deduct points for missed optimization opportunities
score -= result.BuildTimeOptimizations.Count * 5;
score -= result.ParallelizationOpportunities.Count * 8;
score -= result.ResourceOptimizations.Count * 6;
score -= result.UnnecessarySteps.Count * 3;
return Math.Max(0, score);
}
}
// Data models for Pipeline Optimization
public class PipelineData
{
public string PipelineType { get; set; }
public List<PipelineJob> Jobs { get; set; } = new();
public int TotalJobs { get; set; }
public int TotalSteps { get; set; }
public List<string> ParseErrors { get; set; } = new();
}
public class PipelineJob
{
public string Name { get; set; }
public string Id { get; set; }
public string RunsOn { get; set; }
public int? TimeoutMinutes { get; set; }
public List<string> Dependencies { get; set; } = new();
public string Strategy { get; set; } = "single";
public List<PipelineStep> Steps { get; set; } = new();
}
public class PipelineStep
{
public string Name { get; set; }
public string Type { get; set; }
public string Action { get; set; }
public string Script { get; set; }
public int EstimatedDuration { get; set; } // in seconds
}
public class PipelineOptimizationResult
{
public string PipelineConfig { get; set; }
public string PipelineType { get; set; }
public int OriginalJobCount { get; set; }
public int OriginalStepCount { get; set; }
public List<BuildTimeOptimization> BuildTimeOptimizations { get; set; } = new();
public List<ParallelizationOpportunity> ParallelizationOpportunities { get; set; } = new();
public List<ResourceOptimization> ResourceOptimizations { get; set; } = new();
public List<UnnecessaryStep> UnnecessarySteps { get; set; } = new();
}
public class BuildTimeOptimization
{
public string Type { get; set; }
public string Description { get; set; }
public string Recommendation { get; set; }
public string EstimatedTimeSaving { get; set; }
public string Implementation { get; set; }
}
public class ParallelizationOpportunity
{
public string Type { get; set; }
public string Description { get; set; }
public List<string> JobsAffected { get; set; } = new();
public string Recommendation { get; set; }
public string EstimatedTimeSaving { get; set; }
}
public class ResourceOptimization
{
public string Type { get; set; }
public string Description { get; set; }
public string Recommendation { get; set; }
public string ResourceType { get; set; }
public string EstimatedCostSaving { get; set; }
}
public class UnnecessaryStep
{
public string Type { get; set; }
public string Description { get; set; }
public string JobName { get; set; }
public List<string> StepNames { get; set; } = new();
public string Recommendation { get; set; }
public int TimeSaved { get; set; } // in seconds
}
public class PerformanceMetrics
{
public string EstimatedOriginalBuildTime { get; set; }
public string EstimatedOptimizedBuildTime { get; set; }
public string EstimatedTimeSaving { get; set; }
public string EstimatedCostSaving { get; set; }
public string PerformanceImprovement { get; set; }
public int CurrentParallelJobs { get; set; }
}
}