Documentation ¶
Overview ¶
Package metrics implements calculation engines for DORA, CHI, and AI impact metrics.

The package provides:

- an AI Impact Metrics (HIR/AAC/TPH) calculation engine
- a Code Health Index (CHI) calculation engine
- an enhanced DORA calculator with timezone, caching, and GraphQL support
- enhanced types for mature DORA/CHI/HIR metrics with timezone and caching support
- caching infrastructure with TTL support for metrics
- a GraphQL client for heavy GitHub API aggregations
- time utilities for timezone and period handling
Index ¶
- type AIAssistData
- type AIAssistancePoint
- type AIImpactAnalysis
- type AIMetricsCalculator
- type AIToolUsage
- type AggregatedAIMetrics
- type AggregatedCHIMetrics
- type AggregatedDORAMetrics
- type AggregatedMetrics
- type Assignees
- type AuthProvider
- type BranchRef
- type CHICalculator
- type CHITrendAnalysis
- type CacheConfig
- type CacheEntry
- type CacheInfo
- type CacheMiddleware
- func (cm *CacheMiddleware) CacheOrCompute(ctx context.Context, metricType string, request MetricsRequest, ...) (interface{}, CacheInfo, error)
- func (cm *CacheMiddleware) GetCacheStats() CacheStats
- func (cm *CacheMiddleware) InvalidateMetricTypeCache(ctx context.Context, metricType string) int
- func (cm *CacheMiddleware) InvalidateRepositoryCache(ctx context.Context, repoFullName string) int
- func (cm *CacheMiddleware) WarmupCache(ctx context.Context, repositories []string, metricTypes []string) error
- type CacheStats
- type CodeFile
- type CodeQualityImpact
- type CodingTime
- type Collaborators
- type Comment
- type Comments
- type Commit
- type CommitComment
- type CommitComments
- type ComplexityHotspot
- type CrossRepoInsights
- type DORACalculator
- type DORAConfig
- type DORAPercentiles
- type DORATimeSeriesPoint
- type DataQuality
- type Deployment
- type DeploymentStatus
- type DeploymentStatuses
- type DeploymentTrend
- type Deployments
- type DeveloperEfficiency
- type Duplication
- type EnhancedAIMetrics
- type EnhancedCHIMetrics
- type EnhancedDORACalculator
- type EnhancedDORAMetrics
- type FileMetric
- type GitActor
- type GitClient
- type GitHubClient
- type GraphQLClient
- func (gc *GraphQLClient) GetMultipleRepositoryMetrics(ctx context.Context, repositories []string, since time.Time) (map[string]*RepositoryMetricsData, error)
- func (gc *GraphQLClient) GetOrganizationRepositories(ctx context.Context, org string, limit int) ([]string, error)
- func (gc *GraphQLClient) GetRepositoryMetrics(ctx context.Context, owner, repo string, since time.Time) (*RepositoryMetricsData, error)
- type GraphQLCommit
- type GraphQLDeployment
- type GraphQLError
- type GraphQLIssue
- type GraphQLPullRequest
- type GraphQLRequest
- type GraphQLResponse
- type History
- type HumanVsAIContributions
- type IDEClient
- type IncidentClassification
- type Issue
- type Issues
- type JiraClient
- type Label
- type Labels
- type Language
- type LanguageEdge
- type LanguageHealthMetric
- type LanguageMetric
- type LanguageTime
- type Languages
- type Location
- type MetricsCache
- func (mc *MetricsCache) Clear(ctx context.Context)
- func (mc *MetricsCache) Delete(ctx context.Context, key string)
- func (mc *MetricsCache) GenerateCacheKey(metricType string, request MetricsRequest) string
- func (mc *MetricsCache) Get(ctx context.Context, key string) (*CacheEntry, bool)
- func (mc *MetricsCache) GetStats() CacheStats
- func (mc *MetricsCache) GetTTLForMetricType(metricType string) time.Duration
- func (mc *MetricsCache) InvalidateByTags(ctx context.Context, tags []string) int
- func (mc *MetricsCache) ResetStats()
- func (mc *MetricsCache) Set(ctx context.Context, key string, data interface{}, ttl time.Duration, ...) error
- type MetricsRequest
- type OrganizationalHealth
- type Owner
- type ProductivityMetrics
- type ProjectTime
- type PullRequest
- type PullRequestCommits
- type PullRequestConnection
- type PullRequests
- type Reference
- type Release
- type Releases
- type RepositoryMetricsData
- type Review
- type Reviews
- type Target
- type TechnicalDebtItem
- type TestCoverageDetail
- type TimeRange
- type TimeUtils
- func (tu *TimeUtils) CalculateBusinessDays(start, end time.Time, timezone string) (int, error)
- func (tu *TimeUtils) CalculateWorkingHours(start, end time.Time, timezone string, startHour, endHour int, ...) (float64, error)
- func (tu *TimeUtils) ConvertFromUTC(utcTime time.Time, toTimezone string) (time.Time, error)
- func (tu *TimeUtils) ConvertToUTC(t time.Time, fromTimezone string) (time.Time, error)
- func (tu *TimeUtils) FormatTimeForTimezone(t time.Time, timezone, format string) (string, error)
- func (tu *TimeUtils) GetBusinessHours(timeRange TimeRange, startHour, endHour int, excludeWeekends bool) ([]TimeRange, error)
- func (tu *TimeUtils) GetCommonTimezones() []string
- func (tu *TimeUtils) GetNextBusinessDay(t time.Time, timezone string) (time.Time, error)
- func (tu *TimeUtils) GetPeriodBoundaries(baseTime time.Time, granularity string, timezone string, periodsBack int) ([]TimeRange, error)
- func (tu *TimeUtils) GetPeriodDuration(granularity string) (time.Duration, error)
- func (tu *TimeUtils) GetTimezoneOffset(t time.Time, timezone string) (int, error)
- func (tu *TimeUtils) IsBusinessDay(t time.Time) bool
- func (tu *TimeUtils) ParseTimeRange(start, end time.Time, timezone string) (TimeRange, error)
- func (tu *TimeUtils) ValidateTimezone(timezone string) error
- type User
- type WakaTimeClient
- type WorkflowRun
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AIAssistData ¶
type AIAssistData struct {
TotalSuggestions int `json:"total_suggestions"`
AcceptedSuggestions int `json:"accepted_suggestions"`
AcceptanceRate float64 `json:"acceptance_rate"`
TimeWithAI float64 `json:"time_with_ai_hours"`
LinesGenerated int `json:"lines_generated"`
Provider string `json:"provider"` // copilot, codewhisperer, codeium, etc.
}
AIAssistData represents AI assistance data reported by the IDE
type AIAssistancePoint ¶
type AIAssistancePoint struct {
Timestamp time.Time `json:"timestamp"`
HIR float64 `json:"hir"`
AAC float64 `json:"aac"`
TPH float64 `json:"tph"`
ActiveAITools []string `json:"active_ai_tools"`
ProductivityIndex float64 `json:"productivity_index"`
}
AIAssistancePoint represents AI assistance over time
type AIImpactAnalysis ¶
type AIImpactAnalysis struct {
HIRInsight string `json:"hir_insight"`
HIRRecommendation string `json:"hir_recommendation"`
AACInsight string `json:"aac_insight"`
AACRecommendation string `json:"aac_recommendation"`
TPHInsight string `json:"tph_insight"`
TPHRecommendation string `json:"tph_recommendation"`
OverallAssessment string `json:"overall_assessment"`
}
AIImpactAnalysis provides insights on AI usage patterns
type AIMetricsCalculator ¶
type AIMetricsCalculator struct {
// contains filtered or unexported fields
}
AIMetricsCalculator calculates Human Input Ratio, AI Assist Coverage, and Throughput per Human-hour
func NewAIMetricsCalculator ¶
func NewAIMetricsCalculator(wakatime WakaTimeClient, git GitClient, ide IDEClient) *AIMetricsCalculator
NewAIMetricsCalculator creates a new AI metrics calculator
func (*AIMetricsCalculator) AnalyzeAIImpact ¶
func (a *AIMetricsCalculator) AnalyzeAIImpact(hir, aac, tph float64) AIImpactAnalysis
AnalyzeAIImpact provides insights on AI usage patterns
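A minimal usage sketch (assumptions: the package is imported as metrics, standard-library imports are omitted, and waka, git, and ide are caller-provided implementations of the WakaTimeClient, GitClient, and IDEClient interfaces; the sample HIR/AAC/TPH values are illustrative only):

func analyzeAIImpact(waka metrics.WakaTimeClient, git metrics.GitClient, ide metrics.IDEClient) {
	calc := metrics.NewAIMetricsCalculator(waka, git, ide)

	// Interpret precomputed scores: HIR = 0.72, AAC = 0.45, TPH = 3.1.
	// The exact scales expected by AnalyzeAIImpact are an assumption here.
	analysis := calc.AnalyzeAIImpact(0.72, 0.45, 3.1)
	fmt.Println(analysis.HIRInsight)
	fmt.Println(analysis.OverallAssessment)
}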
type AIToolUsage ¶
type AIToolUsage struct {
ToolName string `json:"tool_name"` // "copilot", "chatgpt", "codeium", etc.
UsageHours float64 `json:"usage_hours"`
AcceptanceRate float64 `json:"acceptance_rate"`
LinesGenerated int `json:"lines_generated"`
LinesAccepted int `json:"lines_accepted"`
CodeQualityScore float64 `json:"code_quality_score"`
ProductivityBoost float64 `json:"productivity_boost"` // Percentage increase
}
AIToolUsage represents usage of specific AI tools
type AggregatedAIMetrics ¶
type AggregatedAIMetrics struct {
MeanHIR float64 `json:"mean_hir"`
MeanAAC float64 `json:"mean_aac"`
MeanTPH float64 `json:"mean_tph"`
TotalHumanHours float64 `json:"total_human_hours"`
TotalAIHours float64 `json:"total_ai_hours"`
MostAIAssistedRepo string `json:"most_ai_assisted_repo"`
LeastAIAssistedRepo string `json:"least_ai_assisted_repo"`
OrganizationalAIAdoption float64 `json:"organizational_ai_adoption"`
AverageProductivityBoost float64 `json:"average_productivity_boost"`
}
AggregatedAIMetrics represents AI metrics across repositories
type AggregatedCHIMetrics ¶
type AggregatedCHIMetrics struct {
MeanCHIScore int `json:"mean_chi_score"`
MeanDuplicationPercent float64 `json:"mean_duplication_pct"`
MeanCyclomaticComplexity float64 `json:"mean_cyclomatic_avg"`
MeanTestCoverage float64 `json:"mean_test_coverage_pct"`
MeanMaintainabilityIndex float64 `json:"mean_maintainability_index"`
TotalTechnicalDebtHours float64 `json:"total_technical_debt_hours"`
HealthiestRepo string `json:"healthiest_repo"`
MostTechnicalDebtRepo string `json:"most_technical_debt_repo"`
LanguageHealthBreakdown []LanguageHealthMetric `json:"language_health_breakdown"`
}
AggregatedCHIMetrics represents CHI metrics across repositories
type AggregatedDORAMetrics ¶
type AggregatedDORAMetrics struct {
MeanLeadTimeP95Hours float64 `json:"mean_lead_time_p95_hours"`
MeanDeploymentFrequencyWeek float64 `json:"mean_deployment_frequency_per_week"`
MeanChangeFailRatePercent float64 `json:"mean_change_fail_rate_pct"`
MeanMTTRHours float64 `json:"mean_mttr_hours"`
TotalDeployments int `json:"total_deployments"`
TotalIncidents int `json:"total_incidents"`
BestPerformingRepo string `json:"best_performing_repo"`
WorstPerformingRepo string `json:"worst_performing_repo"`
Percentiles DORAPercentiles `json:"percentiles"`
}
AggregatedDORAMetrics represents DORA metrics across repositories
type AggregatedMetrics ¶
type AggregatedMetrics struct {
Repositories []types.Repository `json:"repositories"`
TimeRange TimeRange `json:"time_range"`
AggregatedDORA AggregatedDORAMetrics `json:"aggregated_dora"`
AggregatedCHI AggregatedCHIMetrics `json:"aggregated_chi"`
AggregatedAI AggregatedAIMetrics `json:"aggregated_ai"`
CrossRepoInsights CrossRepoInsights `json:"cross_repo_insights"`
OrganizationalHealth OrganizationalHealth `json:"organizational_health"`
CacheInfo CacheInfo `json:"cache_info"`
}
AggregatedMetrics represents cross-repository aggregated metrics
type AuthProvider ¶
AuthProvider interface for getting authentication tokens
type CHICalculator ¶
type CHICalculator struct {
// contains filtered or unexported fields
}
CHICalculator calculates Code Health Index metrics
func NewCHICalculator ¶
func NewCHICalculator(repoPath string) *CHICalculator
NewCHICalculator creates a new CHI calculator
func (*CHICalculator) Calculate ¶
func (c *CHICalculator) Calculate(ctx context.Context, repo types.Repository) (*types.CHIMetrics, error)
Calculate computes Code Health Index for a repository
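A minimal sketch, assuming repo is a populated types.Repository value from the project's external types package and that the constructor path points at a local checkout:

func computeCHI(ctx context.Context, repo types.Repository) {
	// NewCHICalculator takes the path to a local checkout of the repository.
	chi := metrics.NewCHICalculator("/path/to/local/checkout")

	result, err := chi.Calculate(ctx, repo)
	if err != nil {
		log.Printf("CHI calculation failed: %v", err)
		return
	}
	fmt.Printf("CHI metrics: %+v\n", result)
}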
type CHITrendAnalysis ¶
type CHITrendAnalysis struct {
ScoreTrend string `json:"score_trend"` // "improving", "stable", "declining"
ComplexityTrend string `json:"complexity_trend"`
TestCoverageTrend string `json:"test_coverage_trend"`
TechnicalDebtTrend string `json:"technical_debt_trend"`
MonthlyScoreChange float64 `json:"monthly_score_change"`
RecommendedActions []string `json:"recommended_actions,omitempty"`
}
CHITrendAnalysis represents CHI trends over time
type CacheConfig ¶
type CacheConfig struct {
DefaultTTL time.Duration `json:"default_ttl"`
MaxSize int `json:"max_size"`
CleanupInterval time.Duration `json:"cleanup_interval"`
MetricTypeTTLs map[string]time.Duration `json:"metric_type_ttls"`
EnableStats bool `json:"enable_stats"`
PersistentCache bool `json:"persistent_cache"`
}
CacheConfig configures the metrics cache
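A configuration sketch; the metric-type keys in MetricTypeTTLs ("dora", "chi") are assumptions, since the recognized keys are not listed in this documentation:

func newConfiguredCache() *metrics.MetricsCache {
	cfg := metrics.CacheConfig{
		DefaultTTL:      15 * time.Minute,
		MaxSize:         1000,
		CleanupInterval: 5 * time.Minute,
		MetricTypeTTLs: map[string]time.Duration{
			"dora": 30 * time.Minute, // assumed key
			"chi":  2 * time.Hour,    // assumed key
		},
		EnableStats: true,
	}
	return metrics.NewMetricsCache(cfg)
}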
type CacheEntry ¶
type CacheEntry struct {
Key string `json:"key"`
Data interface{} `json:"data"`
CachedAt time.Time `json:"cached_at"`
ExpiresAt time.Time `json:"expires_at"`
TTL time.Duration `json:"ttl"`
AccessCount int `json:"access_count"`
LastAccessed time.Time `json:"last_accessed"`
DataSize int `json:"data_size"`
ComputeTime time.Duration `json:"compute_time"`
Tags []string `json:"tags"`
}
CacheEntry represents a cached metrics entry
type CacheInfo ¶
type CacheInfo struct {
CacheHit bool `json:"cache_hit"`
CacheKey string `json:"cache_key,omitempty"`
CachedAt *time.Time `json:"cached_at,omitempty"`
TTL time.Duration `json:"ttl,omitempty"`
ExpiresAt *time.Time `json:"expires_at,omitempty"`
ComputeTimeMs int64 `json:"compute_time_ms"`
DataSources []string `json:"data_sources,omitempty"`
}
CacheInfo represents cache metadata
type CacheMiddleware ¶
type CacheMiddleware struct {
// contains filtered or unexported fields
}
CacheMiddleware provides caching for metrics calculators
func NewCacheMiddleware ¶
func NewCacheMiddleware(cache *MetricsCache) *CacheMiddleware
NewCacheMiddleware creates a new cache middleware
func (*CacheMiddleware) CacheOrCompute ¶
func (cm *CacheMiddleware) CacheOrCompute(
	ctx context.Context,
	metricType string,
	request MetricsRequest,
	computeFunc func() (interface{}, error),
) (interface{}, CacheInfo, error)
CacheOrCompute executes computation with caching
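A hedged sketch of wrapping an expensive computation; the "dora" metric-type label and the computeDORA helper are hypothetical:

func cachedDORA(ctx context.Context, mw *metrics.CacheMiddleware, req metrics.MetricsRequest) {
	result, info, err := mw.CacheOrCompute(ctx, "dora", req, func() (interface{}, error) {
		return computeDORA(ctx, req) // hypothetical expensive computation
	})
	if err != nil {
		log.Printf("metrics computation failed: %v", err)
		return
	}
	if info.CacheHit {
		log.Printf("served from cache (key %s, expires %v)", info.CacheKey, info.ExpiresAt)
	}
	_ = result // type-assert to the concrete metrics type in real code
}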
func (*CacheMiddleware) GetCacheStats ¶
func (cm *CacheMiddleware) GetCacheStats() CacheStats
GetCacheStats returns cache performance statistics
func (*CacheMiddleware) InvalidateMetricTypeCache ¶
func (cm *CacheMiddleware) InvalidateMetricTypeCache(ctx context.Context, metricType string) int
InvalidateMetricTypeCache invalidates all cache entries for a metric type
func (*CacheMiddleware) InvalidateRepositoryCache ¶
func (cm *CacheMiddleware) InvalidateRepositoryCache(ctx context.Context, repoFullName string) int
InvalidateRepositoryCache invalidates all cache entries for a repository
func (*CacheMiddleware) WarmupCache ¶
func (cm *CacheMiddleware) WarmupCache(ctx context.Context, repositories []string, metricTypes []string) error
WarmupCache pre-computes and caches common metrics
type CacheStats ¶
type CacheStats struct {
Hits int64 `json:"hits"`
Misses int64 `json:"misses"`
Evictions int64 `json:"evictions"`
TotalEntries int `json:"total_entries"`
TotalSize int `json:"total_size"`
HitRate float64 `json:"hit_rate"`
AverageLatency time.Duration `json:"average_latency"`
LastReset time.Time `json:"last_reset"`
}
CacheStats tracks cache performance metrics
type CodeFile ¶
type CodeFile struct {
Path string
Language string
Lines int
LinesOfCode int
CyclomaticComplexity int
Functions int
TestFile bool
Duplications []Duplication
}
CodeFile represents a source code file analysis
type CodeQualityImpact ¶
type CodeQualityImpact struct {
BugDensityReduction float64 `json:"bug_density_reduction"`
TestCoverageImprovement float64 `json:"test_coverage_improvement"`
CodeComplexityChange float64 `json:"code_complexity_change"`
RefactoringFrequency float64 `json:"refactoring_frequency"`
CodeReviewPassRate float64 `json:"code_review_pass_rate"`
SecurityVulnerabilities int `json:"security_vulnerabilities"`
}
CodeQualityImpact represents AI impact on code quality
type CodingTime ¶
type CodingTime struct {
TotalHours float64 `json:"total_hours"`
CodingHours float64 `json:"coding_hours"`
Period int `json:"period_days"`
Languages []LanguageTime `json:"languages"`
Projects []ProjectTime `json:"projects"`
}
CodingTime represents time tracking data from WakaTime
type Collaborators ¶
type Commit ¶
type Commit struct {
SHA string `json:"sha"`
Message string `json:"message"`
Author string `json:"author"`
Date time.Time `json:"date"`
Files []string `json:"files"`
Additions int `json:"additions"`
Deletions int `json:"deletions"`
// AI assistance indicators
CoAuthoredBy []string `json:"co_authored_by"`
AIAssisted bool `json:"ai_assisted"`
AIProvider string `json:"ai_provider"`
}
Commit represents a Git commit with AI assistance indicators
type CommitComment ¶
type CommitComments ¶
type CommitComments struct {
TotalCount int `json:"totalCount"`
Nodes []CommitComment `json:"nodes"`
}
type ComplexityHotspot ¶
type ComplexityHotspot struct {
File string `json:"file"`
Function string `json:"function"`
CyclomaticComplexity int `json:"cyclomatic_complexity"`
LinesOfCode int `json:"lines_of_code"`
EstimatedRefactorHours float64 `json:"estimated_refactor_hours"`
Priority string `json:"priority"` // "critical", "high", "medium", "low"
}
ComplexityHotspot represents high-complexity code areas
type CrossRepoInsights ¶
type CrossRepoInsights struct {
CommonPatterns []string `json:"common_patterns"`
BestPracticesSharing []string `json:"best_practices_sharing"`
KnowledgeTransferOpps []string `json:"knowledge_transfer_opportunities"`
StandardizationOpps []string `json:"standardization_opportunities"`
CollaborationHotspots []string `json:"collaboration_hotspots"`
}
CrossRepoInsights represents insights across repositories
type DORACalculator ¶
type DORACalculator struct {
// contains filtered or unexported fields
}
DORACalculator calculates DevOps Research and Assessment metrics
func NewDORACalculator ¶
func NewDORACalculator(github GitHubClient, jira JiraClient) *DORACalculator
NewDORACalculator creates a new DORA metrics calculator
func (*DORACalculator) Calculate ¶
func (d *DORACalculator) Calculate(ctx context.Context, repo types.Repository, periodDays int) (*types.DORAMetrics, error)
Calculate computes DORA metrics for a repository
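A minimal sketch, assuming github and jira are caller-provided implementations of GitHubClient and JiraClient and repo comes from the external types package:

func runDORACalculator(ctx context.Context, github metrics.GitHubClient, jira metrics.JiraClient, repo types.Repository) {
	calc := metrics.NewDORACalculator(github, jira)

	dora, err := calc.Calculate(ctx, repo, 30) // look back 30 days
	if err != nil {
		log.Printf("DORA calculation failed: %v", err)
		return
	}
	fmt.Printf("DORA metrics: %+v\n", dora)
}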
type DORAConfig ¶
type DORAConfig struct {
DefaultTimezone string `json:"default_timezone"`
BusinessHoursStart int `json:"business_hours_start"` // 9 AM
BusinessHoursEnd int `json:"business_hours_end"` // 5 PM
ExcludeWeekends bool `json:"exclude_weekends"`
IncidentThresholdHours float64 `json:"incident_threshold_hours"` // Time before considering it an incident
EnableGraphQL bool `json:"enable_graphql"`
CacheEnabled bool `json:"cache_enabled"`
DefaultCacheTTL time.Duration `json:"default_cache_ttl"`
MaxDataPoints int `json:"max_data_points"`
}
DORAConfig configures the enhanced DORA calculator
type DORAPercentiles ¶
type DORAPercentiles struct {
LeadTimeP50 float64 `json:"lead_time_p50"`
LeadTimeP75 float64 `json:"lead_time_p75"`
LeadTimeP90 float64 `json:"lead_time_p90"`
LeadTimeP95 float64 `json:"lead_time_p95"`
DeployFreqP50 float64 `json:"deploy_freq_p50"`
DeployFreqP75 float64 `json:"deploy_freq_p75"`
DeployFreqP90 float64 `json:"deploy_freq_p90"`
ChangeFailRateP50 float64 `json:"change_fail_rate_p50"`
ChangeFailRateP75 float64 `json:"change_fail_rate_p75"`
MTTRP50 float64 `json:"mttr_p50"`
MTTRP75 float64 `json:"mttr_p75"`
}
DORAPercentiles represents percentile analysis of DORA metrics
type DORATimeSeriesPoint ¶
type DORATimeSeriesPoint struct {
Timestamp time.Time `json:"timestamp"`
LeadTimeHours float64 `json:"lead_time_hours"`
DeploymentCount int `json:"deployment_count"`
FailureCount int `json:"failure_count"`
RecoveryTimeHours float64 `json:"recovery_time_hours"`
ChangeFailureRate float64 `json:"change_failure_rate"`
}
DORATimeSeriesPoint represents a point in DORA metrics time series
type DataQuality ¶
type DataQuality struct {
Completeness float64 `json:"completeness"` // 0.0-1.0
Accuracy float64 `json:"accuracy"` // 0.0-1.0
Timeliness float64 `json:"timeliness"` // 0.0-1.0
Consistency float64 `json:"consistency"` // 0.0-1.0
DataPoints int `json:"data_points"`
MissingData int `json:"missing_data"`
QualityWarnings []string `json:"quality_warnings,omitempty"`
}
DataQuality represents data quality metrics
type Deployment ¶
type Deployment struct {
ID int `json:"id"`
Environment string `json:"environment"`
State string `json:"state"` // success, failure, error, pending
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
SHA string `json:"sha"`
}
Deployment represents a deployment event
type DeploymentStatus ¶
type DeploymentStatuses ¶
type DeploymentStatuses struct {
TotalCount int `json:"totalCount"`
Nodes []DeploymentStatus `json:"nodes"`
}
type DeploymentTrend ¶
type DeploymentTrend struct {
Period string `json:"period"` // "week", "month", "quarter"
DeploymentCount int `json:"deployment_count"`
SuccessRate float64 `json:"success_rate"`
AverageLeadTime float64 `json:"average_lead_time_hours"`
TrendDirection string `json:"trend_direction"` // "improving", "stable", "declining"
}
DeploymentTrend represents deployment frequency trends
type Deployments ¶
type Deployments struct {
TotalCount int `json:"totalCount"`
Nodes []GraphQLDeployment `json:"nodes"`
}
type DeveloperEfficiency ¶
type DeveloperEfficiency struct {
FocusTimeHours float64 `json:"focus_time_hours"`
InterruptionFrequency float64 `json:"interruption_frequency"`
ContextSwitchingPenalty float64 `json:"context_switching_penalty"`
FlowStateAchievement float64 `json:"flow_state_achievement"`
LearningCurveReduction float64 `json:"learning_curve_reduction"`
OnboardingTimeReduction float64 `json:"onboarding_time_reduction"`
}
DeveloperEfficiency represents developer efficiency metrics
type Duplication ¶
Duplication represents a detected instance of code duplication
type EnhancedAIMetrics ¶
type EnhancedAIMetrics struct {
types.AIMetrics
TimeRange TimeRange `json:"time_range"`
Granularity string `json:"granularity"`
Timezone string `json:"timezone"`
AIToolBreakdown []AIToolUsage `json:"ai_tool_breakdown,omitempty"`
ProductivityMetrics ProductivityMetrics `json:"productivity_metrics"`
CodeQualityImpact CodeQualityImpact `json:"code_quality_impact"`
DeveloperEfficiency DeveloperEfficiency `json:"developer_efficiency"`
AIAssistanceTimeline []AIAssistancePoint `json:"ai_assistance_timeline,omitempty"`
HumanVsAIContributions HumanVsAIContributions `json:"human_vs_ai_contributions"`
Confidence float64 `json:"confidence"`
DataQuality DataQuality `json:"data_quality"`
CacheInfo CacheInfo `json:"cache_info"`
}
EnhancedAIMetrics extends AIMetrics with detailed AI assistance analysis
type EnhancedCHIMetrics ¶
type EnhancedCHIMetrics struct {
types.CHIMetrics
TimeRange TimeRange `json:"time_range"`
Granularity string `json:"granularity"`
Timezone string `json:"timezone"`
FileMetrics []FileMetric `json:"file_metrics,omitempty"`
LanguageBreakdown []LanguageMetric `json:"language_breakdown,omitempty"`
ComplexityHotspots []ComplexityHotspot `json:"complexity_hotspots,omitempty"`
TechnicalDebtItems []TechnicalDebtItem `json:"technical_debt_items,omitempty"`
TestCoverageDetail TestCoverageDetail `json:"test_coverage_detail"`
Trends CHITrendAnalysis `json:"trends"`
Confidence float64 `json:"confidence"`
DataQuality DataQuality `json:"data_quality"`
CacheInfo CacheInfo `json:"cache_info"`
}
EnhancedCHIMetrics extends CHIMetrics with detailed analysis
type EnhancedDORACalculator ¶
type EnhancedDORACalculator struct {
// contains filtered or unexported fields
}
EnhancedDORACalculator calculates DORA metrics with advanced features
func NewEnhancedDORACalculator ¶
func NewEnhancedDORACalculator(
	githubClient GitHubClient,
	graphqlClient *GraphQLClient,
	cache *CacheMiddleware,
	config DORAConfig,
) *EnhancedDORACalculator
NewEnhancedDORACalculator creates a new enhanced DORA calculator
func (*EnhancedDORACalculator) Calculate ¶
func (edc *EnhancedDORACalculator) Calculate(ctx context.Context, request MetricsRequest) (*EnhancedDORAMetrics, error)
Calculate computes enhanced DORA metrics
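An end-to-end wiring sketch; the GraphQL base URL, timezone, and granularity value are illustrative assumptions, and gh, auth, and repo are caller-provided:

func runEnhancedDORA(ctx context.Context, gh metrics.GitHubClient, auth metrics.AuthProvider, repo types.Repository) {
	gql := metrics.NewGraphQLClient(auth, "https://api.github.com/graphql") // assumed endpoint
	cache := metrics.NewMetricsCache(metrics.CacheConfig{DefaultTTL: 30 * time.Minute, MaxSize: 500})
	mw := metrics.NewCacheMiddleware(cache)

	calc := metrics.NewEnhancedDORACalculator(gh, gql, mw, metrics.DORAConfig{
		DefaultTimezone:    "UTC",
		BusinessHoursStart: 9,
		BusinessHoursEnd:   17,
		ExcludeWeekends:    true,
		EnableGraphQL:      true,
		CacheEnabled:       true,
		DefaultCacheTTL:    30 * time.Minute,
	})

	now := time.Now()
	req := metrics.MetricsRequest{
		Repository:  repo,
		TimeRange:   metrics.TimeRange{Start: now.AddDate(0, -1, 0), End: now, Timezone: "UTC"},
		Granularity: "week",
		UseCache:    true,
	}

	result, err := calc.Calculate(ctx, req)
	if err != nil {
		log.Printf("enhanced DORA calculation failed: %v", err)
		return
	}
	fmt.Printf("mean lead time: %.1fh across %d deployments\n", result.MeanLeadTimeHours, result.TotalDeployments)
}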
type EnhancedDORAMetrics ¶
type EnhancedDORAMetrics struct {
types.DORAMetrics
TimeRange TimeRange `json:"time_range"`
Granularity string `json:"granularity"`
Timezone string `json:"timezone"`
IncidentCount int `json:"incident_count"`
FailedDeployments int `json:"failed_deployments"`
TotalDeployments int `json:"total_deployments"`
MeanLeadTimeHours float64 `json:"mean_lead_time_hours"`
MedianLeadTimeHours float64 `json:"median_lead_time_hours"`
TimeSeries []DORATimeSeriesPoint `json:"time_series,omitempty"`
IncidentBreakdown []IncidentClassification `json:"incident_breakdown,omitempty"`
DeploymentTrends []DeploymentTrend `json:"deployment_trends,omitempty"`
Confidence float64 `json:"confidence"`
DataQuality DataQuality `json:"data_quality"`
CacheInfo CacheInfo `json:"cache_info"`
}
EnhancedDORAMetrics extends DORAMetrics with timezone and granularity support
type FileMetric ¶
type FileMetric struct {
Path string `json:"path"`
Language string `json:"language"`
LinesOfCode int `json:"lines_of_code"`
CyclomaticComplexity int `json:"cyclomatic_complexity"`
TestCoverage float64 `json:"test_coverage"`
DuplicationScore float64 `json:"duplication_score"`
MaintainabilityIndex float64 `json:"maintainability_index"`
TechnicalDebtHours float64 `json:"technical_debt_hours"`
LastModified time.Time `json:"last_modified"`
}
FileMetric represents metrics for a single file
type GitClient ¶
type GitClient interface {
GetCommits(ctx context.Context, owner, repo string, since time.Time) ([]Commit, error)
}
GitClient interface for Git commit analysis
type GitHubClient ¶
type GitHubClient interface {
GetPullRequests(ctx context.Context, owner, repo string, since time.Time) ([]PullRequest, error)
GetDeployments(ctx context.Context, owner, repo string, since time.Time) ([]Deployment, error)
GetWorkflowRuns(ctx context.Context, owner, repo string, since time.Time) ([]WorkflowRun, error)
}
GitHubClient interface for repository data access
type GraphQLClient ¶
type GraphQLClient struct {
// contains filtered or unexported fields
}
GraphQLClient provides GraphQL queries for complex metrics aggregations
func NewGraphQLClient ¶
func NewGraphQLClient(authProvider AuthProvider, baseURL string) *GraphQLClient
NewGraphQLClient creates a new GraphQL client
func (*GraphQLClient) GetMultipleRepositoryMetrics ¶
func (gc *GraphQLClient) GetMultipleRepositoryMetrics(ctx context.Context, repositories []string, since time.Time) (map[string]*RepositoryMetricsData, error)
GetMultipleRepositoryMetrics fetches metrics for multiple repositories in a single query
func (*GraphQLClient) GetOrganizationRepositories ¶
func (gc *GraphQLClient) GetOrganizationRepositories(ctx context.Context, org string, limit int) ([]string, error)
GetOrganizationRepositories fetches all repositories for an organization
func (*GraphQLClient) GetRepositoryMetrics ¶
func (gc *GraphQLClient) GetRepositoryMetrics(ctx context.Context, owner, repo string, since time.Time) (*RepositoryMetricsData, error)
GetRepositoryMetrics fetches comprehensive repository metrics using GraphQL
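A minimal sketch of a single-repository query; the owner/repo values are placeholders and the GraphQL base URL is an assumption:

func fetchRepoMetrics(ctx context.Context, auth metrics.AuthProvider) {
	gql := metrics.NewGraphQLClient(auth, "https://api.github.com/graphql") // assumed endpoint

	since := time.Now().AddDate(0, -3, 0) // last three months
	data, err := gql.GetRepositoryMetrics(ctx, "octocat", "hello-world", since)
	if err != nil {
		log.Printf("GraphQL query failed: %v", err)
		return
	}
	fmt.Printf("%d pull requests, %d issues since %v\n",
		data.Repository.PullRequests.TotalCount,
		data.Repository.Issues.TotalCount,
		since)
}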
type GraphQLCommit ¶
type GraphQLCommit struct {
Oid string `json:"oid"`
Message string `json:"message"`
CommittedDate time.Time `json:"committedDate"`
Author GitActor `json:"author"`
Committer GitActor `json:"committer"`
Additions int `json:"additions"`
Deletions int `json:"deletions"`
ChangedFiles int `json:"changedFiles"`
AssociatedPullRequests PullRequestConnection `json:"associatedPullRequests"`
}
type GraphQLDeployment ¶
type GraphQLDeployment struct {
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
Environment string `json:"environment"`
State string `json:"state"`
Description string `json:"description"`
Creator User `json:"creator"`
Ref Reference `json:"ref"`
Statuses DeploymentStatuses `json:"statuses"`
}
type GraphQLError ¶
type GraphQLError struct {
Message string `json:"message"`
Locations []Location `json:"locations,omitempty"`
Path []string `json:"path,omitempty"`
Extensions interface{} `json:"extensions,omitempty"`
}
GraphQLError represents a GraphQL error
type GraphQLIssue ¶
type GraphQLIssue struct {
Number int `json:"number"`
Title string `json:"title"`
State string `json:"state"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
ClosedAt *time.Time `json:"closedAt"`
Author User `json:"author"`
Labels Labels `json:"labels"`
Assignees Assignees `json:"assignees"`
Comments Comments `json:"comments"`
}
type GraphQLPullRequest ¶
type GraphQLPullRequest struct {
Number int `json:"number"`
Title string `json:"title"`
State string `json:"state"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
MergedAt *time.Time `json:"mergedAt"`
ClosedAt *time.Time `json:"closedAt"`
Author User `json:"author"`
Mergeable string `json:"mergeable"`
Additions int `json:"additions"`
Deletions int `json:"deletions"`
ChangedFiles int `json:"changedFiles"`
Reviews Reviews `json:"reviews"`
Comments Comments `json:"comments"`
Commits PullRequestCommits `json:"commits"`
Labels Labels `json:"labels"`
Assignees Assignees `json:"assignees"`
}
type GraphQLRequest ¶
type GraphQLRequest struct {
Query string `json:"query"`
Variables map[string]interface{} `json:"variables,omitempty"`
}
GraphQLRequest represents a GraphQL query request
type GraphQLResponse ¶
type GraphQLResponse struct {
Data json.RawMessage `json:"data"`
Errors []GraphQLError `json:"errors,omitempty"`
}
GraphQLResponse represents a GraphQL query response
type History ¶
type History struct {
TotalCount int `json:"totalCount"`
Nodes []GraphQLCommit `json:"nodes"`
}
type HumanVsAIContributions ¶
type HumanVsAIContributions struct {
HumanCommits int `json:"human_commits"`
AIAssistedCommits int `json:"ai_assisted_commits"`
HumanLinesAdded int `json:"human_lines_added"`
AILinesAdded int `json:"ai_lines_added"`
HumanTestsWritten int `json:"human_tests_written"`
AITestsWritten int `json:"ai_tests_written"`
HumanBugsFixed int `json:"human_bugs_fixed"`
AIBugsFixed int `json:"ai_bugs_fixed"`
CollaborationScore float64 `json:"collaboration_score"` // How well human and AI work together
}
HumanVsAIContributions represents the breakdown of human vs AI contributions
type IDEClient ¶
type IDEClient interface {
GetAIAssistData(ctx context.Context, user, repo string, since time.Time) (*AIAssistData, error)
}
IDEClient interface for IDE telemetry data
type IncidentClassification ¶
type IncidentClassification struct {
Type string `json:"type"` // "deployment_failure", "hotfix", "rollback", "outage"
Severity string `json:"severity"` // "critical", "high", "medium", "low"
Count int `json:"count"`
MeanResolutionTime time.Duration `json:"mean_resolution_time"`
TotalDowntimeHours float64 `json:"total_downtime_hours"`
AffectedDeployments []string `json:"affected_deployments,omitempty"`
}
IncidentClassification represents incident analysis
type Issue ¶
type Issue struct {
Key string `json:"key"`
Type string `json:"type"`
Status string `json:"status"`
Priority string `json:"priority"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
ResolvedAt *time.Time `json:"resolved_at"`
}
Issue represents a Jira issue
type Issues ¶
type Issues struct {
TotalCount int `json:"totalCount"`
Nodes []GraphQLIssue `json:"nodes"`
}
type JiraClient ¶
type JiraClient interface {
GetIssues(ctx context.Context, project string, since time.Time) ([]Issue, error)
}
JiraClient interface for issue tracking data
type LanguageEdge ¶
type LanguageHealthMetric ¶
type LanguageHealthMetric struct {
Language string `json:"language"`
RepositoryCount int `json:"repository_count"`
AverageCHIScore int `json:"average_chi_score"`
AverageComplexity float64 `json:"average_complexity"`
AverageTestCoverage float64 `json:"average_test_coverage"`
TotalTechnicalDebtHours float64 `json:"total_technical_debt_hours"`
HealthRanking int `json:"health_ranking"`
}
LanguageHealthMetric represents health metrics per language across repos
type LanguageMetric ¶
type LanguageMetric struct {
Language string `json:"language"`
FileCount int `json:"file_count"`
TotalLinesOfCode int `json:"total_lines_of_code"`
AverageComplexity float64 `json:"average_complexity"`
TestCoverage float64 `json:"test_coverage"`
TechnicalDebtHours float64 `json:"technical_debt_hours"`
}
LanguageMetric represents metrics per programming language
type LanguageTime ¶
type Languages ¶
type Languages struct {
TotalSize int `json:"totalSize"`
Edges []LanguageEdge `json:"edges"`
}
type MetricsCache ¶
type MetricsCache struct {
// contains filtered or unexported fields
}
MetricsCache provides caching functionality for calculated metrics
func NewMetricsCache ¶
func NewMetricsCache(config CacheConfig) *MetricsCache
NewMetricsCache creates a new metrics cache
func (*MetricsCache) Clear ¶
func (mc *MetricsCache) Clear(ctx context.Context)
Clear removes all entries from cache
func (*MetricsCache) Delete ¶
func (mc *MetricsCache) Delete(ctx context.Context, key string)
Delete removes an entry from cache
func (*MetricsCache) GenerateCacheKey ¶
func (mc *MetricsCache) GenerateCacheKey(metricType string, request MetricsRequest) string
GenerateCacheKey generates a cache key for a metrics request
func (*MetricsCache) Get ¶
func (mc *MetricsCache) Get(ctx context.Context, key string) (*CacheEntry, bool)
Get retrieves a cached metric
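A lookup sketch combining GenerateCacheKey and Get; the "chi" metric-type label is an assumption:

func lookupCached(ctx context.Context, cache *metrics.MetricsCache, req metrics.MetricsRequest) {
	key := cache.GenerateCacheKey("chi", req) // "chi" is an assumed metric-type label
	entry, ok := cache.Get(ctx, key)
	if !ok {
		fmt.Println("cache miss")
		return
	}
	fmt.Printf("cache hit: cached at %v, accessed %d times\n", entry.CachedAt, entry.AccessCount)
	_ = entry.Data // type-assert to the concrete metrics type in real code
}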
func (*MetricsCache) GetStats ¶
func (mc *MetricsCache) GetStats() CacheStats
GetStats returns cache performance statistics
func (*MetricsCache) GetTTLForMetricType ¶
func (mc *MetricsCache) GetTTLForMetricType(metricType string) time.Duration
GetTTLForMetricType returns the TTL for a specific metric type
func (*MetricsCache) InvalidateByTags ¶
func (mc *MetricsCache) InvalidateByTags(ctx context.Context, tags []string) int
InvalidateByTags removes all entries with matching tags
func (*MetricsCache) ResetStats ¶
func (mc *MetricsCache) ResetStats()
ResetStats resets cache statistics
type MetricsRequest ¶
type MetricsRequest struct {
Repository types.Repository `json:"repository"`
TimeRange TimeRange `json:"time_range"`
Granularity string `json:"granularity"` // "hour", "day", "week", "month"
UseCache bool `json:"use_cache"`
CacheTTL time.Duration `json:"cache_ttl"`
}
MetricsRequest represents a request for metrics calculation
type OrganizationalHealth ¶
type OrganizationalHealth struct {
DeliveryMaturity string `json:"delivery_maturity"` // "elite", "high", "medium", "low"
CodeHealthMaturity string `json:"code_health_maturity"`
AIAdoptionMaturity string `json:"ai_adoption_maturity"`
DevExperienceScore float64 `json:"dev_experience_score"`
InnovationIndex float64 `json:"innovation_index"`
ScalingReadiness float64 `json:"scaling_readiness"`
TalentRetentionRisk string `json:"talent_retention_risk"`
CompetitiveAdvantage string `json:"competitive_advantage"`
StrategicRecommendations []string `json:"strategic_recommendations"`
}
OrganizationalHealth represents organization-wide health metrics
type ProductivityMetrics ¶
type ProductivityMetrics struct {
CommitsPerHour float64 `json:"commits_per_hour"`
LinesPerHour float64 `json:"lines_per_hour"`
FeaturesPerSprint float64 `json:"features_per_sprint"`
BugsPerFeature float64 `json:"bugs_per_feature"`
TimeToFirstReview float64 `json:"time_to_first_review_hours"`
CodeReviewCycles float64 `json:"code_review_cycles"`
HumanOnlyProductivity float64 `json:"human_only_productivity"`
AIAssistedProductivity float64 `json:"ai_assisted_productivity"`
ProductivityImprovement float64 `json:"productivity_improvement"` // Percentage
}
ProductivityMetrics represents productivity analysis
type ProjectTime ¶
type PullRequest ¶
type PullRequest struct {
Number int `json:"number"`
Title string `json:"title"`
State string `json:"state"` // open, closed, merged
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
MergedAt *time.Time `json:"merged_at"`
ClosedAt *time.Time `json:"closed_at"`
Commits int `json:"commits"`
Additions int `json:"additions"`
Deletions int `json:"deletions"`
ChangedFiles int `json:"changed_files"`
FirstReviewAt *time.Time `json:"first_review_at"`
}
PullRequest represents a GitHub pull request
type PullRequestCommits ¶
type PullRequestCommits struct {
TotalCount int `json:"totalCount"`
Nodes []GraphQLCommit `json:"nodes"`
}
type PullRequestConnection ¶
type PullRequestConnection struct {
TotalCount int `json:"totalCount"`
Nodes []GraphQLPullRequest `json:"nodes"`
}
type PullRequests ¶
type PullRequests struct {
TotalCount int `json:"totalCount"`
Nodes []GraphQLPullRequest `json:"nodes"`
}
type RepositoryMetricsData ¶
type RepositoryMetricsData struct {
Repository struct {
Name string `json:"name"`
Owner Owner `json:"owner"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
PrimaryLanguage Language `json:"primaryLanguage"`
Languages Languages `json:"languages"`
DefaultBranchRef BranchRef `json:"defaultBranchRef"`
PullRequests PullRequests `json:"pullRequests"`
Issues Issues `json:"issues"`
Releases Releases `json:"releases"`
Deployments Deployments `json:"deployments"`
Collaborators Collaborators `json:"collaborators"`
CommitComments CommitComments `json:"commitComments"`
DiskUsage int `json:"diskUsage"`
ForkCount int `json:"forkCount"`
StargazerCount int `json:"stargazerCount"`
WatcherCount int `json:"watchers"`
} `json:"repository"`
}
RepositoryMetricsData represents comprehensive repository data from GraphQL
type TechnicalDebtItem ¶
type TechnicalDebtItem struct {
Type string `json:"type"` // "complexity", "duplication", "test_coverage", "maintainability"
Description string `json:"description"`
Location string `json:"location"`
EstimatedEffortHours float64 `json:"estimated_effort_hours"`
ImpactLevel string `json:"impact_level"` // "critical", "high", "medium", "low"
RecommendedAction string `json:"recommended_action"`
}
TechnicalDebtItem represents a specific technical debt item
type TestCoverageDetail ¶
type TestCoverageDetail struct {
LinesCovered int `json:"lines_covered"`
LinesTotal int `json:"lines_total"`
BranchesCovered int `json:"branches_covered"`
BranchesTotal int `json:"branches_total"`
FunctionsCovered int `json:"functions_covered"`
FunctionsTotal int `json:"functions_total"`
UncoveredFiles []string `json:"uncovered_files,omitempty"`
TestFileCount int `json:"test_file_count"`
TestToCodeRatio float64 `json:"test_to_code_ratio"`
}
TestCoverageDetail represents detailed test coverage analysis
type TimeRange ¶
type TimeRange struct {
Start time.Time `json:"start"`
End time.Time `json:"end"`
Timezone string `json:"timezone"` // IANA timezone (e.g., "America/New_York")
}
TimeRange represents a time period with timezone support
type TimeUtils ¶
type TimeUtils struct {
// contains filtered or unexported fields
}
TimeUtils provides utilities for timezone-aware time handling
func NewTimeUtils ¶
NewTimeUtils creates a new TimeUtils instance
func (*TimeUtils) CalculateBusinessDays ¶
func (tu *TimeUtils) CalculateBusinessDays(start, end time.Time, timezone string) (int, error)
CalculateBusinessDays returns the number of business days between two dates
func (*TimeUtils) CalculateWorkingHours ¶
func (tu *TimeUtils) CalculateWorkingHours(start, end time.Time, timezone string, startHour, endHour int, excludeWeekends bool) (float64, error)
CalculateWorkingHours calculates working hours between two times
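A short sketch, assuming tu was created with NewTimeUtils and business hours run 9-17 in "Europe/Berlin" with weekends excluded:

func workingHoursBetween(tu *metrics.TimeUtils, deployed, resolved time.Time) {
	hours, err := tu.CalculateWorkingHours(deployed, resolved, "Europe/Berlin", 9, 17, true)
	if err != nil {
		log.Printf("invalid time range or timezone: %v", err)
		return
	}
	fmt.Printf("%.1f working hours between deployment and resolution\n", hours)
}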
func (*TimeUtils) ConvertFromUTC ¶
func (tu *TimeUtils) ConvertFromUTC(utcTime time.Time, toTimezone string) (time.Time, error)
ConvertFromUTC converts a UTC time to a specific timezone
func (*TimeUtils) ConvertToUTC ¶
func (tu *TimeUtils) ConvertToUTC(t time.Time, fromTimezone string) (time.Time, error)
ConvertToUTC converts a time to UTC
func (*TimeUtils) FormatTimeForTimezone ¶
func (tu *TimeUtils) FormatTimeForTimezone(t time.Time, timezone, format string) (string, error)
FormatTimeForTimezone formats a time for display in a specific timezone
func (*TimeUtils) GetBusinessHours ¶
func (tu *TimeUtils) GetBusinessHours(timeRange TimeRange, startHour, endHour int, excludeWeekends bool) ([]TimeRange, error)
GetBusinessHours filters time ranges to business hours only
func (*TimeUtils) GetCommonTimezones ¶
func (tu *TimeUtils) GetCommonTimezones() []string
GetCommonTimezones returns a list of common timezone identifiers
func (*TimeUtils) GetNextBusinessDay ¶
func (tu *TimeUtils) GetNextBusinessDay(t time.Time, timezone string) (time.Time, error)
GetNextBusinessDay returns the next business day
func (*TimeUtils) GetPeriodBoundaries ¶
func (tu *TimeUtils) GetPeriodBoundaries(baseTime time.Time, granularity string, timezone string, periodsBack int) ([]TimeRange, error)
GetPeriodBoundaries calculates period boundaries based on granularity
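A sketch of iterating the last six weekly periods; "week" is assumed to be an accepted granularity, matching the values listed for MetricsRequest:

func lastSixWeeklyPeriods(tu *metrics.TimeUtils) {
	ranges, err := tu.GetPeriodBoundaries(time.Now(), "week", "UTC", 6)
	if err != nil {
		log.Printf("could not compute period boundaries: %v", err)
		return
	}
	for _, r := range ranges {
		fmt.Printf("%v - %v (%s)\n", r.Start, r.End, r.Timezone)
	}
}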
func (*TimeUtils) GetPeriodDuration ¶
func (tu *TimeUtils) GetPeriodDuration(granularity string) (time.Duration, error)
GetPeriodDuration returns the duration for a granularity period
func (*TimeUtils) GetTimezoneOffset ¶
func (tu *TimeUtils) GetTimezoneOffset(t time.Time, timezone string) (int, error)
GetTimezoneOffset returns the UTC offset for a timezone at a specific time
func (*TimeUtils) IsBusinessDay ¶
func (tu *TimeUtils) IsBusinessDay(t time.Time) bool
IsBusinessDay checks if a date is a business day (Monday-Friday)
func (*TimeUtils) ParseTimeRange ¶
func (tu *TimeUtils) ParseTimeRange(start, end time.Time, timezone string) (TimeRange, error)
ParseTimeRange parses a time range with timezone support
func (*TimeUtils) ValidateTimezone ¶
func (tu *TimeUtils) ValidateTimezone(timezone string) error
ValidateTimezone checks if a timezone is valid
type WakaTimeClient ¶
type WakaTimeClient interface {
GetCodingTime(ctx context.Context, user, repo string, since time.Time) (*CodingTime, error)
}
WakaTimeClient interface for time tracking data
type WorkflowRun ¶
type WorkflowRun struct {
ID int `json:"id"`
Name string `json:"name"`
Status string `json:"status"` // completed, in_progress, queued
Conclusion string `json:"conclusion"` // success, failure, cancelled, skipped
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
SHA string `json:"sha"`
}
WorkflowRun represents a CI/CD pipeline run