using LibGit2Sharp; using MarketAlly.AIPlugin.DevOps.Plugins; using Microsoft.Extensions.Logging; using System; using System.Collections.Generic; using System.IO; using System.Text.Json; using System.Threading.Tasks; namespace MarketAlly.AIPlugin.DevOps.Examples { class Program { static async Task Main(string[] args) { // Setup logging using var loggerFactory = LoggerFactory.Create(builder => { builder .AddConsole() .SetMinimumLevel(Microsoft.Extensions.Logging.LogLevel.Information); }); var logger = loggerFactory.CreateLogger(); var registry = new AIPluginRegistry(logger); // Register DevOps plugins registry.RegisterPlugin(new DevOpsScanPlugin(loggerFactory.CreateLogger())); registry.RegisterPlugin(new DockerfileAnalyzerPlugin(loggerFactory.CreateLogger())); registry.RegisterPlugin(new ConfigurationAnalyzerPlugin(loggerFactory.CreateLogger())); registry.RegisterPlugin(new PipelineOptimizerPlugin(loggerFactory.CreateLogger())); registry.RegisterPlugin(new ChangelogGeneratorPlugin(loggerFactory.CreateLogger())); Console.WriteLine("=== MarketAlly AI Plugin DevOps Toolkit Demo ===\n"); // Demo 1: Analyze GitHub Actions workflow await DemoGitHubActionsAnalysis(registry); // Demo 2: Analyze Dockerfile await DemoDockerfileAnalysis(registry); // Demo 3: Create sample files and analyze them await DemoCreateAndAnalyzeSampleFiles(registry); // Demo 4: Configuration Analysis await DemoConfigurationAnalysis(registry); // Demo 5: Pipeline Optimization await DemoPipelineOptimization(registry); // Demo 6: Changelog Generation await DemoChangelogGeneration(registry); Console.WriteLine("\n=== Demo completed! ==="); Console.WriteLine("Press any key to exit..."); Console.ReadKey(); } private static async Task DemoGitHubActionsAnalysis(AIPluginRegistry registry) { Console.WriteLine("📋 Demo 1: GitHub Actions Workflow Analysis"); Console.WriteLine("=" + new string('=', 50)); // Create a sample GitHub Actions workflow file var workflowDir = Path.Combine("sample-project", ".github", "workflows"); Directory.CreateDirectory(workflowDir); var workflowPath = Path.Combine(workflowDir, "ci.yml"); var sampleWorkflow = """ name: CI Pipeline on: push: branches: [ main ] pull_request: branches: [ main ] jobs: test: runs-on: ubuntu-latest steps: - uses: actions/checkout@main - name: Setup Node uses: actions/setup-node@v3 with: node-version: '18' - name: Install dependencies run: npm install - name: Run tests run: npm test deploy: needs: test runs-on: ubuntu-latest if: github.ref == 'refs/heads/main' steps: - uses: actions/checkout@main - name: Deploy run: | echo "Deploying with API_KEY=sk-1234567890abcdef" ./deploy.sh """; await File.WriteAllTextAsync(workflowPath, sampleWorkflow); try { var result = await registry.CallFunctionAsync("DevOpsScan", new Dictionary { ["pipelinePath"] = workflowDir, ["pipelineType"] = "github", ["checkSecurity"] = true, ["optimizeBuild"] = true, ["checkBestPractices"] = true, ["generateRecommendations"] = true }); if (result.Success) { Console.WriteLine("✅ Pipeline analysis completed successfully!\n"); dynamic data = result.Data; Console.WriteLine($"📊 Analysis Summary:"); Console.WriteLine($" Files analyzed: {data.FilesAnalyzed}"); Console.WriteLine($" Security issues: {data.Summary.TotalSecurityIssues}"); Console.WriteLine($" Optimization opportunities: {data.Summary.TotalOptimizations}"); Console.WriteLine($" Best practice violations: {data.Summary.TotalBestPracticeViolations}"); Console.WriteLine($" Overall score: {data.Summary.OverallScore}/100\n"); if (data.SecurityIssues != null && ((IList)data.SecurityIssues).Count > 0) { Console.WriteLine("🔒 Security Issues Found:"); foreach (var issue in data.SecurityIssues) { Console.WriteLine($" ⚠️ {issue.Severity}: {issue.Issue}"); Console.WriteLine($" 💡 {issue.Recommendation}\n"); } } if (data.BestPracticeViolations != null && ((IList)data.BestPracticeViolations).Count > 0) { Console.WriteLine("📋 Best Practice Violations:"); foreach (var violation in data.BestPracticeViolations) { Console.WriteLine($" 📝 {violation.Rule}: {violation.Description}"); Console.WriteLine($" 💡 {violation.Recommendation}\n"); } } if (data.OptimizationOpportunities != null && ((IList)data.OptimizationOpportunities).Count > 0) { Console.WriteLine("⚡ Optimization Opportunities:"); foreach (var optimization in data.OptimizationOpportunities) { Console.WriteLine($" 🚀 {optimization.Type}: {optimization.Description}"); Console.WriteLine($" 💡 {optimization.Recommendation}"); if (optimization.EstimatedTimesSaving != null) { Console.WriteLine($" ⏱️ {optimization.EstimatedTimesSaving}"); } Console.WriteLine(); } } if (data.Recommendations != null && ((IList)data.Recommendations).Count > 0) { Console.WriteLine("🎯 Key Recommendations:"); foreach (var recommendation in data.Recommendations) { Console.WriteLine($" • {recommendation}"); } Console.WriteLine(); } } else { Console.WriteLine($"❌ Pipeline analysis failed: {result.Message}"); } } catch (Exception ex) { Console.WriteLine($"❌ Error during pipeline analysis: {ex.Message}"); } Console.WriteLine(); } private static async Task DemoDockerfileAnalysis(AIPluginRegistry registry) { Console.WriteLine("🐳 Demo 2: Dockerfile Analysis"); Console.WriteLine("=" + new string('=', 50)); // Create a sample Dockerfile with various issues var dockerfilePath = Path.Combine("sample-project", "Dockerfile"); var sampleDockerfile = """ FROM node:18 RUN apt-get update RUN apt-get install -y curl RUN apt-get install -y wget RUN apt-get install -y git WORKDIR /app COPY package*.json ./ RUN npm install COPY . . RUN echo "API_KEY=sk-1234567890abcdef" > .env EXPOSE 3000 CMD ["npm", "start"] """; await File.WriteAllTextAsync(dockerfilePath, sampleDockerfile); try { var result = await registry.CallFunctionAsync("DockerfileAnalyzer", new Dictionary { ["dockerfilePath"] = dockerfilePath, ["checkSecurity"] = true, ["optimizeSize"] = true, ["checkBestPractices"] = true, ["checkMultiStage"] = true, ["generateOptimized"] = true }); if (result.Success) { Console.WriteLine("✅ Dockerfile analysis completed successfully!\n"); dynamic data = result.Data; Console.WriteLine($"📊 Analysis Summary:"); Console.WriteLine($" Base image: {data.BaseImage}"); Console.WriteLine($" Total instructions: {data.TotalInstructions}"); Console.WriteLine($" Security score: {data.Summary.SecurityScore}/100"); Console.WriteLine($" Optimization score: {data.Summary.OptimizationScore}/100"); Console.WriteLine($" Best practice score: {data.Summary.BestPracticeScore}/100"); Console.WriteLine($" Overall score: {data.Summary.OverallScore}/100\n"); if (data.SecurityIssues != null && ((IList)data.SecurityIssues).Count > 0) { Console.WriteLine("🔒 Security Issues Found:"); foreach (var issue in data.SecurityIssues) { Console.WriteLine($" ⚠️ {issue.Severity}: {issue.Issue}"); if (issue.LineNumber != null) { Console.WriteLine($" 📍 Line {issue.LineNumber}"); } Console.WriteLine($" 💡 {issue.Recommendation}\n"); } } if (data.SizeOptimizations != null && ((IList)data.SizeOptimizations).Count > 0) { Console.WriteLine("📦 Size Optimization Opportunities:"); foreach (var optimization in data.SizeOptimizations) { Console.WriteLine($" 🚀 {optimization.Type}: {optimization.Description}"); Console.WriteLine($" 💡 {optimization.Recommendation}"); if (optimization.EstimatedSizeSaving != null) { Console.WriteLine($" 💾 {optimization.EstimatedSizeSaving}"); } Console.WriteLine(); } } if (data.BestPracticeViolations != null && ((IList)data.BestPracticeViolations).Count > 0) { Console.WriteLine("📋 Best Practice Violations:"); foreach (var violation in data.BestPracticeViolations) { Console.WriteLine($" 📝 {violation.Rule}: {violation.Description}"); Console.WriteLine($" 💡 {violation.Recommendation}"); Console.WriteLine($" ⚡ Impact: {violation.Impact}\n"); } } if (data.OptimizedDockerfile != null) { Console.WriteLine("🛠️ Optimized Dockerfile Generated:"); Console.WriteLine(" 📄 Preview (first 10 lines):"); var lines = data.OptimizedDockerfile.ToString().Split('\n'); for (int i = 0; i < Math.Min(10, lines.Length); i++) { Console.WriteLine($" {i + 1:D2}: {lines[i]}"); } if (lines.Length > 10) { Console.WriteLine($" ... ({lines.Length - 10} more lines)"); } // Save optimized Dockerfile var optimizedPath = Path.Combine("sample-project", "Dockerfile.optimized"); await File.WriteAllTextAsync(optimizedPath, data.OptimizedDockerfile.ToString()); Console.WriteLine($"\n 💾 Saved to: {optimizedPath}"); } } else { Console.WriteLine($"❌ Dockerfile analysis failed: {result.Message}"); } } catch (Exception ex) { Console.WriteLine($"❌ Error during Dockerfile analysis: {ex.Message}"); } Console.WriteLine(); } private static async Task DemoCreateAndAnalyzeSampleFiles(AIPluginRegistry registry) { Console.WriteLine("🔧 Demo 3: Sample Project Creation and Analysis"); Console.WriteLine("=" + new string('=', 50)); // Create a sample project structure var projectDir = "demo-project"; Directory.CreateDirectory(projectDir); // Create a better GitHub Actions workflow var workflowDir = Path.Combine(projectDir, ".github", "workflows"); Directory.CreateDirectory(workflowDir); var improvedWorkflow = """ name: Improved CI/CD Pipeline on: push: branches: [ main, develop ] pull_request: branches: [ main ] permissions: contents: read checks: write pull-requests: write jobs: test: name: Run Tests runs-on: ubuntu-latest timeout-minutes: 10 strategy: matrix: node-version: [16, 18, 20] steps: - name: Checkout code uses: actions/checkout@v4.1.1 - name: Cache dependencies uses: actions/cache@v3.3.2 with: path: ~/.npm key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} restore-keys: | ${{ runner.os }}-node- - name: Setup Node.js uses: actions/setup-node@v4.0.0 with: node-version: ${{ matrix.node-version }} - name: Install dependencies run: npm ci - name: Run tests run: npm test - name: Upload test results uses: actions/upload-artifact@v3.1.3 if: always() with: name: test-results-${{ matrix.node-version }} path: test-results/ build: name: Build Application runs-on: ubuntu-latest timeout-minutes: 15 needs: test steps: - name: Checkout code uses: actions/checkout@v4.1.1 - name: Setup Node.js uses: actions/setup-node@v4.0.0 with: node-version: '18' - name: Cache dependencies uses: actions/cache@v3.3.2 with: path: ~/.npm key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - name: Install dependencies run: npm ci - name: Build application run: npm run build - name: Upload build artifacts uses: actions/upload-artifact@v3.1.3 with: name: build-files path: dist/ deploy: name: Deploy to Production runs-on: ubuntu-latest timeout-minutes: 10 needs: [test, build] if: github.ref == 'refs/heads/main' && github.event_name == 'push' environment: production steps: - name: Download build artifacts uses: actions/download-artifact@v3.0.2 with: name: build-files path: dist/ - name: Deploy to server env: DEPLOY_KEY: ${{ secrets.DEPLOY_KEY }} SERVER_HOST: ${{ secrets.SERVER_HOST }} run: | echo "Deploying to production..." # Your deployment script here """; await File.WriteAllTextAsync(Path.Combine(workflowDir, "improved-ci.yml"), improvedWorkflow); // Create an improved Dockerfile var improvedDockerfile = """ # Multi-stage build for optimal size and security FROM node:18-alpine AS builder # Set working directory WORKDIR /app # Copy package files first for better caching COPY package*.json ./ # Install dependencies RUN npm ci --only=production && npm cache clean --force # Copy source code COPY . . # Build the application RUN npm run build # Production stage FROM node:18-alpine AS production # Add metadata labels LABEL maintainer="devops@company.com" LABEL version="1.0.0" LABEL description="Sample Node.js application" # Create non-root user RUN addgroup -g 1001 -S nodejs && \ adduser -S nextjs -u 1001 -G nodejs # Set working directory WORKDIR /app # Copy built application from builder stage COPY --from=builder --chown=nextjs:nodejs /app/dist ./dist COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules COPY --chown=nextjs:nodejs package*.json ./ # Switch to non-root user USER nextjs # Expose port EXPOSE 3000 # Add health check HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ CMD curl -f http://localhost:3000/health || exit 1 # Start the application CMD ["node", "dist/index.js"] """; await File.WriteAllTextAsync(Path.Combine(projectDir, "Dockerfile"), improvedDockerfile); // Create .dockerignore var dockerignore = """ node_modules npm-debug.log .git .gitignore README.md .env .nyc_output coverage .cache .DS_Store """; await File.WriteAllTextAsync(Path.Combine(projectDir, ".dockerignore"), dockerignore); Console.WriteLine("📁 Created sample project structure:"); Console.WriteLine($" 📂 {projectDir}/"); Console.WriteLine($" ├── 📂 .github/workflows/"); Console.WriteLine($" │ └── 📄 improved-ci.yml"); Console.WriteLine($" ├── 📄 Dockerfile"); Console.WriteLine($" └── 📄 .dockerignore\n"); // Analyze the improved project Console.WriteLine("🔍 Analyzing improved project...\n"); try { // Analyze the improved workflow var workflowResult = await registry.CallFunctionAsync("DevOpsScan", new Dictionary { ["pipelinePath"] = workflowDir, ["pipelineType"] = "github", ["checkSecurity"] = true, ["optimizeBuild"] = true, ["checkBestPractices"] = true, ["generateRecommendations"] = true }); if (workflowResult.Success) { dynamic workflowData = workflowResult.Data; Console.WriteLine("✅ Improved Workflow Analysis:"); Console.WriteLine($" Overall score: {workflowData.Summary.OverallScore}/100"); Console.WriteLine($" Security issues: {workflowData.Summary.TotalSecurityIssues}"); Console.WriteLine($" Optimization opportunities: {workflowData.Summary.TotalOptimizations}"); Console.WriteLine($" Best practice violations: {workflowData.Summary.TotalBestPracticeViolations}\n"); } // Analyze the improved Dockerfile var dockerResult = await registry.CallFunctionAsync("DockerfileAnalyzer", new Dictionary { ["dockerfilePath"] = Path.Combine(projectDir, "Dockerfile"), ["checkSecurity"] = true, ["optimizeSize"] = true, ["checkBestPractices"] = true, ["checkMultiStage"] = true }); if (dockerResult.Success) { dynamic dockerData = dockerResult.Data; Console.WriteLine("✅ Improved Dockerfile Analysis:"); Console.WriteLine($" Overall score: {dockerData.Summary.OverallScore}/100"); Console.WriteLine($" Security score: {dockerData.Summary.SecurityScore}/100"); Console.WriteLine($" Optimization score: {dockerData.Summary.OptimizationScore}/100"); Console.WriteLine($" Best practice score: {dockerData.Summary.BestPracticeScore}/100"); Console.WriteLine($" Multi-stage build: {dockerData.MultiStageAnalysis.IsMultiStage}"); Console.WriteLine($" Stage count: {dockerData.MultiStageAnalysis.StageCount}\n"); } // Generate analysis report var report = new { AnalysisDate = DateTime.UtcNow, ProjectPath = projectDir, WorkflowAnalysis = workflowResult.Data, DockerfileAnalysis = dockerResult.Data, Summary = new { WorkflowScore = workflowResult.Success ? ((dynamic)workflowResult.Data).Summary.OverallScore : 0, DockerScore = dockerResult.Success ? ((dynamic)dockerResult.Data).Summary.OverallScore : 0 } }; var reportJson = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true, PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); var reportPath = Path.Combine(projectDir, "devops-analysis-report.json"); await File.WriteAllTextAsync(reportPath, reportJson); Console.WriteLine($"📊 Analysis report saved to: {reportPath}"); } catch (Exception ex) { Console.WriteLine($"❌ Error during improved project analysis: {ex.Message}"); } } private static async Task DemoConfigurationAnalysis(AIPluginRegistry registry) { Console.WriteLine("⚙️ Demo 4: Configuration Analysis"); Console.WriteLine("=" + new string('=', 50)); // Create sample configuration files var configDir = "sample-config"; Directory.CreateDirectory(configDir); // Create appsettings.json (base config) var appSettings = """ { "Logging": { "LogLevel": { "Default": "Information", "Microsoft": "Warning" } }, "ConnectionStrings": { "DefaultConnection": "Server=localhost;Database=MyApp;Trusted_Connection=true;" }, "AppSettings": { "ApiUrl": "https://api.example.com", "EnableDebug": false, "MaxRetries": 3 } } """; await File.WriteAllTextAsync(Path.Combine(configDir, "appsettings.json"), appSettings); // Create appsettings.Development.json var devSettings = """ { "Logging": { "LogLevel": { "Default": "Debug", "Microsoft": "Information" } }, "ConnectionStrings": { "DefaultConnection": "Server=localhost;Database=MyApp_Dev;Trusted_Connection=true;" }, "AppSettings": { "ApiUrl": "https://dev-api.example.com", "EnableDebug": true, "MaxRetries": 1, "SecretKey": "dev-secret-12345" } } """; await File.WriteAllTextAsync(Path.Combine(configDir, "appsettings.Development.json"), devSettings); // Create appsettings.Production.json var prodSettings = """ { "Logging": { "LogLevel": { "Default": "Warning", "Microsoft": "Error" } }, "ConnectionStrings": { "DefaultConnection": "Server=prod-server;Database=MyApp_Prod;User=dbuser;Password=prod-password-123;" }, "AppSettings": { "ApiUrl": "https://api.example.com", "EnableDebug": true, "MaxRetries": 5 } } """; await File.WriteAllTextAsync(Path.Combine(configDir, "appsettings.Production.json"), prodSettings); Console.WriteLine("📁 Created sample configuration files:"); Console.WriteLine($" 📂 {configDir}/"); Console.WriteLine($" ├── 📄 appsettings.json"); Console.WriteLine($" ├── 📄 appsettings.Development.json"); Console.WriteLine($" └── 📄 appsettings.Production.json\n"); try { var result = await registry.CallFunctionAsync("ConfigurationAnalyzer", new Dictionary { ["configDirectory"] = configDir, ["filePatterns"] = "*.json", ["checkDrift"] = true, ["validateEnvironments"] = true, ["checkSettings"] = true, ["generateDocumentation"] = true }); if (result.Success) { Console.WriteLine("✅ Configuration analysis completed successfully!\n"); dynamic data = result.Data; Console.WriteLine($"📊 Analysis Summary:"); Console.WriteLine($" Files analyzed: {data.FilesAnalyzed}"); Console.WriteLine($" Configuration drift detected: {data.Summary.DriftDetected}"); Console.WriteLine($" Missing settings: {data.Summary.MissingSettingsCount}"); Console.WriteLine($" Security issues: {data.Summary.SecurityIssuesCount}"); Console.WriteLine($" Overall score: {data.Summary.OverallScore}/100\n"); if (data.ConfigurationDrift != null && ((IList)data.ConfigurationDrift).Count > 0) { Console.WriteLine("🔄 Configuration Drift Detected:"); foreach (var drift in data.ConfigurationDrift) { Console.WriteLine($" ⚠️ Key: {drift.Key}"); Console.WriteLine($" Type: {drift.DriftType}"); Console.WriteLine($" Values: {string.Join(", ", ((Dictionary)drift.EnvironmentValues).Select(kvp => $"{kvp.Key}={kvp.Value}"))}"); Console.WriteLine($" 💡 {drift.Recommendation}\n"); } } if (data.SecurityIssues != null && ((IList)data.SecurityIssues).Count > 0) { Console.WriteLine("🔒 Security Issues Found:"); foreach (var issue in data.SecurityIssues) { Console.WriteLine($" ⚠️ {issue.Severity}: {issue.Issue}"); Console.WriteLine($" 📍 {issue.Location}"); Console.WriteLine($" 💡 {issue.Recommendation}\n"); } } if (data.Documentation != null) { var docPath = Path.Combine(configDir, "configuration-analysis.md"); await File.WriteAllTextAsync(docPath, data.Documentation.ToString()); Console.WriteLine($"📚 Documentation generated: {docPath}"); } } else { Console.WriteLine($"❌ Configuration analysis failed: {result.Message}"); } } catch (Exception ex) { Console.WriteLine($"❌ Error during configuration analysis: {ex.Message}"); } Console.WriteLine(); } private static async Task DemoPipelineOptimization(AIPluginRegistry registry) { Console.WriteLine("⚡ Demo 5: Pipeline Optimization"); Console.WriteLine("=" + new string('=', 50)); // Use the workflow file we created earlier var workflowPath = Path.Combine("sample-project", ".github", "workflows", "ci.yml"); if (!File.Exists(workflowPath)) { Console.WriteLine("❌ Workflow file not found. Skipping pipeline optimization demo."); return; } try { var result = await registry.CallFunctionAsync("PipelineOptimizer", new Dictionary { ["pipelineConfig"] = workflowPath, ["optimizeBuildTime"] = true, ["checkParallelization"] = true, ["analyzeResources"] = true, ["checkUnnecessarySteps"] = true, ["generateOptimized"] = true }); if (result.Success) { Console.WriteLine("✅ Pipeline optimization completed successfully!\n"); dynamic data = result.Data; Console.WriteLine($"📊 Optimization Summary:"); Console.WriteLine($" Pipeline type: {data.PipelineType}"); Console.WriteLine($" Original jobs: {data.OriginalMetrics.JobCount}"); Console.WriteLine($" Original steps: {data.OriginalMetrics.StepCount}"); Console.WriteLine($" Estimated build time: {data.OriginalMetrics.EstimatedBuildTime}"); Console.WriteLine($" Total optimizations: {data.Summary.TotalOptimizations}"); Console.WriteLine($" Estimated time saving: {data.Summary.EstimatedTimeSaving}"); Console.WriteLine($" Estimated cost saving: {data.Summary.EstimatedCostSaving}"); Console.WriteLine($" Optimization score: {data.Summary.OptimizationScore}/100\n"); if (data.BuildTimeOptimizations != null && ((IList)data.BuildTimeOptimizations).Count > 0) { Console.WriteLine("⏱️ Build Time Optimizations:"); foreach (var optimization in data.BuildTimeOptimizations) { Console.WriteLine($" 🚀 {optimization.Type}: {optimization.Description}"); Console.WriteLine($" 💡 {optimization.Recommendation}"); Console.WriteLine($" ⏱️ {optimization.EstimatedTimeSaving}\n"); } } if (data.ParallelizationOpportunities != null && ((IList)data.ParallelizationOpportunities).Count > 0) { Console.WriteLine("🔄 Parallelization Opportunities:"); foreach (var opportunity in data.ParallelizationOpportunities) { Console.WriteLine($" ⚡ {opportunity.Type}: {opportunity.Description}"); Console.WriteLine($" 💡 {opportunity.Recommendation}"); Console.WriteLine($" ⏱️ {opportunity.EstimatedTimeSaving}\n"); } } if (data.OptimizedConfig != null) { var optimizedPath = Path.Combine("sample-project", "optimized-workflow.yml"); await File.WriteAllTextAsync(optimizedPath, data.OptimizedConfig.ToString()); Console.WriteLine($"🛠️ Optimized pipeline saved to: {optimizedPath}"); } } else { Console.WriteLine($"❌ Pipeline optimization failed: {result.Message}"); } } catch (Exception ex) { Console.WriteLine($"❌ Error during pipeline optimization: {ex.Message}"); } Console.WriteLine(); } private static async Task DemoChangelogGeneration(AIPluginRegistry registry) { Console.WriteLine("📝 Demo 6: Changelog Generation"); Console.WriteLine("=" + new string('=', 50)); // Create a sample git repository with some commits var repoPath = "sample-repo"; if (Directory.Exists(repoPath)) { Directory.Delete(repoPath, true); } try { // Initialize a new git repository using (var repo = new LibGit2Sharp.Repository(LibGit2Sharp.Repository.Init(repoPath))) { // Configure the repository var signature = new LibGit2Sharp.Signature("Demo User", "demo@example.com", DateTimeOffset.Now); // Create some sample files and commits var commits = new[] { ("feat: add user authentication system", "Initial implementation of JWT-based authentication"), ("fix: resolve login validation issue", "Fixed bug where empty passwords were accepted"), ("feat(api): implement user profile endpoints", "Added GET, PUT, DELETE endpoints for user profiles"), ("docs: update API documentation", "Added comprehensive API documentation with examples"), ("fix(security): patch XSS vulnerability", "Sanitized user input to prevent XSS attacks"), ("feat!: migrate to new database schema", "BREAKING CHANGE: Updated user table structure"), ("chore: update dependencies", "Updated all npm packages to latest versions"), ("test: add unit tests for auth module", "Increased test coverage for authentication"), ("perf: optimize database queries", "Reduced API response time by 40%"), ("refactor(ui): modernize component structure", "Reorganized React components for better maintainability") }; foreach (var (message, description) in commits) { // Create a sample file var fileName = $"file_{commits.ToList().IndexOf((message, description)) + 1}.txt"; var filePath = Path.Combine(repoPath, fileName); await File.WriteAllTextAsync(filePath, $"Sample content for {message}\n{description}"); // Stage and commit LibGit2Sharp.Commands.Stage(repo, fileName); repo.Commit(message, signature, signature); // Add some time between commits await Task.Delay(100); } // Create a tag repo.Tags.Add("v1.0.0", repo.Head.Tip, signature, "Version 1.0.0 release"); Console.WriteLine($"📁 Created sample git repository: {repoPath}"); Console.WriteLine($" 📝 Added {commits.Length} sample commits"); Console.WriteLine($" 🏷️ Tagged as v1.0.0\n"); // Generate changelog var result = await registry.CallFunctionAsync("ChangelogGenerator", new Dictionary { ["repositoryPath"] = repoPath, ["fromVersion"] = "", // From beginning ["toVersion"] = "HEAD", ["format"] = "markdown", ["groupByType"] = true, ["includeAuthors"] = true, ["outputPath"] = Path.Combine(repoPath, "CHANGELOG.md") }); if (result.Success) { Console.WriteLine("✅ Changelog generation completed successfully!\n"); dynamic data = result.Data; Console.WriteLine($"📊 Changelog Summary:"); Console.WriteLine($" Version range: {data.VersionRange}"); Console.WriteLine($" Commits processed: {data.CommitsProcessed}"); Console.WriteLine($" Features: {data.Summary.Features}"); Console.WriteLine($" Bug fixes: {data.Summary.Fixes}"); Console.WriteLine($" Breaking changes: {data.Summary.BreakingChanges}"); Console.WriteLine($" Other changes: {data.Summary.OtherChanges}"); Console.WriteLine($" Unique authors: {data.Summary.UniqueAuthors}"); Console.WriteLine($" Date range: {data.Summary.DateRange}\n"); Console.WriteLine("📄 Generated Changelog Preview (first 20 lines):"); var changelogLines = data.ChangelogContent.ToString().Split('\n'); for (int i = 0; i < Math.Min(20, changelogLines.Length); i++) { Console.WriteLine($" {i + 1:D2}: {changelogLines[i]}"); } if (changelogLines.Length > 20) { Console.WriteLine($" ... ({changelogLines.Length - 20} more lines)"); } Console.WriteLine($"\n💾 Full changelog saved to: {data.OutputPath}"); // Generate additional formats var jsonResult = await registry.CallFunctionAsync("ChangelogGenerator", new Dictionary { ["repositoryPath"] = repoPath, ["format"] = "json", ["groupByType"] = true, ["outputPath"] = Path.Combine(repoPath, "changelog.json") }); var htmlResult = await registry.CallFunctionAsync("ChangelogGenerator", new Dictionary { ["repositoryPath"] = repoPath, ["format"] = "html", ["groupByType"] = true, ["outputPath"] = Path.Combine(repoPath, "changelog.html") }); if (jsonResult.Success && htmlResult.Success) { Console.WriteLine("📄 Additional formats generated:"); Console.WriteLine($" 📊 JSON: {Path.Combine(repoPath, "changelog.json")}"); Console.WriteLine($" 🌐 HTML: {Path.Combine(repoPath, "changelog.html")}"); } } else { Console.WriteLine($"❌ Changelog generation failed: {result.Message}"); } } } catch (Exception ex) { Console.WriteLine($"❌ Error during changelog generation: {ex.Message}"); } Console.WriteLine(); } } }