Initial release: Go Jdenticon library v0.1.0
- Core library with SVG and PNG generation - CLI tool with generate and batch commands - Cross-platform path handling for Windows compatibility - Comprehensive test suite with integration tests
This commit is contained in:
43
internal/perfsuite/regression_test.go
Normal file
43
internal/perfsuite/regression_test.go
Normal file
@@ -0,0 +1,43 @@
|
||||
//go:build perf
|
||||
|
||||
package perfsuite_test
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/ungluedlabs/go-jdenticon/internal/perfsuite"
|
||||
)
|
||||
|
||||
// TestPerformanceRegressionSuite can be called from a regular Go test
|
||||
func TestPerformanceRegressionSuite(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("Skipping performance regression tests in short mode")
|
||||
}
|
||||
|
||||
suite := perfsuite.NewPerformanceSuite()
|
||||
suite.FailOnRegress = false // Don't fail tests, just report
|
||||
|
||||
// Check if we should establish baselines
|
||||
if os.Getenv("ESTABLISH_BASELINES") == "true" {
|
||||
if err := suite.EstablishBaselines(); err != nil {
|
||||
t.Fatalf("Failed to establish baselines: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Run regression check
|
||||
if err := suite.CheckForRegressions(); err != nil {
|
||||
t.Logf("Performance regression check completed with issues: %v", err)
|
||||
// Don't fail the test, just log the results
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkPerformanceSuite runs all performance benchmarks for standard Go bench testing
|
||||
func BenchmarkPerformanceSuite(b *testing.B) {
|
||||
suite := perfsuite.NewPerformanceSuite()
|
||||
|
||||
for _, bench := range suite.Benchmarks {
|
||||
b.Run(bench.Name, bench.BenchmarkFunc)
|
||||
}
|
||||
}
|
||||
469
internal/perfsuite/suite.go
Normal file
469
internal/perfsuite/suite.go
Normal file
@@ -0,0 +1,469 @@
|
||||
package perfsuite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/ungluedlabs/go-jdenticon/jdenticon"
|
||||
)
|
||||
|
||||
// PerformanceBenchmark represents a single performance test case
// run by the suite and compared against a stored baseline.
type PerformanceBenchmark struct {
	Name            string           // unique key; also the baseline map key
	BenchmarkFunc   func(*testing.B) // standard Go benchmark body to execute
	RegressionLimit float64          // Percentage threshold for regression detection
	Description     string           // human-readable summary of what is measured
}
|
||||
|
||||
// PerformanceMetrics holds performance metrics for comparison.
// A snapshot is captured per benchmark run; the Go version, OS, and
// architecture record the environment the numbers were measured on.
type PerformanceMetrics struct {
	NsPerOp     int64     `json:"ns_per_op"`     // wall time per operation
	AllocsPerOp int64     `json:"allocs_per_op"` // heap allocations per operation
	BytesPerOp  int64     `json:"bytes_per_op"`  // heap bytes allocated per operation
	Timestamp   time.Time `json:"timestamp"`     // when the measurement was taken
	GoVersion   string    `json:"go_version"`    // runtime.Version() at measurement time
	OS          string    `json:"os"`            // runtime.GOOS at measurement time
	Arch        string    `json:"arch"`          // runtime.GOARCH at measurement time
}
|
||||
|
||||
// RegressionReport holds the results of a regression check,
// serialized to the suite's report file when reporting is enabled.
type RegressionReport struct {
	Summary  string            `json:"summary"`  // one-line pass/fail overview
	Failures []string          `json:"failures"` // "Name: issues" entries for regressed benchmarks
	Passed   int               `json:"passed"`   // count of benchmarks within their limits
	Total    int               `json:"total"`    // count of benchmarks actually compared
	Results  map[string]string `json:"results"`  // per-benchmark "PASS" or "FAIL: ..." status
}
|
||||
|
||||
// PerformanceSuite manages the performance regression test suite.
type PerformanceSuite struct {
	Benchmarks    []PerformanceBenchmark // cases run by EstablishBaselines and CheckForRegressions
	BaselineFile  string                 // JSON file read by LoadBaselines / written by SaveBaselines
	ReportFile    string                 // JSON report written by CheckForRegressions when EnableReports is set
	EnableReports bool                   // when true, CheckForRegressions writes ReportFile
	FailOnRegress bool                   // when true, CheckForRegressions returns an error on any regression
}
|
||||
|
||||
// NewPerformanceSuite creates a new performance regression test suite
|
||||
func NewPerformanceSuite() *PerformanceSuite {
|
||||
return &PerformanceSuite{
|
||||
Benchmarks: []PerformanceBenchmark{
|
||||
{
|
||||
Name: "CoreSVGGeneration",
|
||||
BenchmarkFunc: benchmarkCoreSVGGeneration,
|
||||
RegressionLimit: 15.0,
|
||||
Description: "Core SVG generation performance",
|
||||
},
|
||||
{
|
||||
Name: "CorePNGGeneration",
|
||||
BenchmarkFunc: benchmarkCorePNGGeneration,
|
||||
RegressionLimit: 25.0,
|
||||
Description: "Core PNG generation performance",
|
||||
},
|
||||
{
|
||||
Name: "CachedGeneration",
|
||||
BenchmarkFunc: benchmarkCachedGeneration,
|
||||
RegressionLimit: 10.0,
|
||||
Description: "Cached icon generation performance",
|
||||
},
|
||||
{
|
||||
Name: "BatchProcessing",
|
||||
BenchmarkFunc: benchmarkBatchProcessing,
|
||||
RegressionLimit: 20.0,
|
||||
Description: "Batch icon generation performance",
|
||||
},
|
||||
{
|
||||
Name: "LargeIcon256",
|
||||
BenchmarkFunc: benchmarkLargeIcon256,
|
||||
RegressionLimit: 30.0,
|
||||
Description: "Large icon (256px) generation performance",
|
||||
},
|
||||
{
|
||||
Name: "LargeIcon512",
|
||||
BenchmarkFunc: benchmarkLargeIcon512,
|
||||
RegressionLimit: 30.0,
|
||||
Description: "Large icon (512px) generation performance",
|
||||
},
|
||||
{
|
||||
Name: "ColorVariationSaturation",
|
||||
BenchmarkFunc: benchmarkColorVariationSaturation,
|
||||
RegressionLimit: 15.0,
|
||||
Description: "Color saturation variation performance",
|
||||
},
|
||||
{
|
||||
Name: "ColorVariationPadding",
|
||||
BenchmarkFunc: benchmarkColorVariationPadding,
|
||||
RegressionLimit: 15.0,
|
||||
Description: "Padding variation performance",
|
||||
},
|
||||
},
|
||||
BaselineFile: ".performance_baselines.json",
|
||||
ReportFile: "performance_report.json",
|
||||
EnableReports: true,
|
||||
FailOnRegress: true,
|
||||
}
|
||||
}
|
||||
|
||||
// Individual benchmark functions
|
||||
|
||||
func benchmarkCoreSVGGeneration(b *testing.B) {
|
||||
testCases := []string{
|
||||
"test@example.com",
|
||||
"user123",
|
||||
"performance-test",
|
||||
"unicode-üser",
|
||||
}
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
input := testCases[i%len(testCases)]
|
||||
_, err := jdenticon.ToSVG(context.Background(), input, 64)
|
||||
if err != nil {
|
||||
b.Fatalf("SVG generation failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkCorePNGGeneration(b *testing.B) {
|
||||
testCases := []string{
|
||||
"test@example.com",
|
||||
"user123",
|
||||
"performance-test",
|
||||
"unicode-üser",
|
||||
}
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
input := testCases[i%len(testCases)]
|
||||
_, err := jdenticon.ToPNG(context.Background(), input, 64)
|
||||
if err != nil {
|
||||
b.Fatalf("PNG generation failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkCachedGeneration(b *testing.B) {
|
||||
generator, err := jdenticon.NewGeneratorWithConfig(jdenticon.DefaultConfig(), 100)
|
||||
if err != nil {
|
||||
b.Fatalf("NewGenerator failed: %v", err)
|
||||
}
|
||||
input := "cached-performance-test"
|
||||
|
||||
// Warm up cache
|
||||
icon, err := generator.Generate(context.Background(), input, 64)
|
||||
if err != nil {
|
||||
b.Fatalf("Cache warmup failed: %v", err)
|
||||
}
|
||||
_, _ = icon.ToSVG()
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
icon, err := generator.Generate(context.Background(), input, 64)
|
||||
if err != nil {
|
||||
b.Fatalf("Cached generation failed: %v", err)
|
||||
}
|
||||
_, err = icon.ToSVG()
|
||||
if err != nil {
|
||||
b.Fatalf("Cached SVG failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkBatchProcessing(b *testing.B) {
|
||||
inputs := []string{
|
||||
"batch1@test.com", "batch2@test.com", "batch3@test.com",
|
||||
"batch4@test.com", "batch5@test.com",
|
||||
}
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
for _, input := range inputs {
|
||||
_, err := jdenticon.ToSVG(context.Background(), input, 64)
|
||||
if err != nil {
|
||||
b.Fatalf("Batch processing failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkLargeIcon256(b *testing.B) {
|
||||
input := "large-icon-test-256"
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := jdenticon.ToSVG(context.Background(), input, 256)
|
||||
if err != nil {
|
||||
b.Fatalf("Large icon (256px) generation failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkLargeIcon512(b *testing.B) {
|
||||
input := "large-icon-test-512"
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := jdenticon.ToSVG(context.Background(), input, 512)
|
||||
if err != nil {
|
||||
b.Fatalf("Large icon (512px) generation failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkColorVariationSaturation(b *testing.B) {
|
||||
config := jdenticon.DefaultConfig()
|
||||
config.ColorSaturation = 0.9
|
||||
|
||||
input := "color-saturation-test"
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := jdenticon.ToSVGWithConfig(context.Background(), input, 64, config)
|
||||
if err != nil {
|
||||
b.Fatalf("Color saturation variation failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkColorVariationPadding(b *testing.B) {
|
||||
config := jdenticon.DefaultConfig()
|
||||
config.Padding = 0.15
|
||||
|
||||
input := "color-padding-test"
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := jdenticon.ToSVGWithConfig(context.Background(), input, 64, config)
|
||||
if err != nil {
|
||||
b.Fatalf("Padding variation failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// calculateChange returns the percentage change from oldVal to newVal.
// A zero baseline is special-cased so callers never divide by zero: it maps
// to 0% when the new value is also zero, and to 100% otherwise.
func calculateChange(oldVal, newVal int64) float64 {
	switch {
	case oldVal != 0:
		return float64(newVal-oldVal) / float64(oldVal) * 100.0
	case newVal == 0:
		return 0
	default:
		return 100.0
	}
}
|
||||
|
||||
// RunBenchmark executes a benchmark and returns metrics
|
||||
func (ps *PerformanceSuite) RunBenchmark(bench PerformanceBenchmark) (PerformanceMetrics, error) {
|
||||
result := testing.Benchmark(bench.BenchmarkFunc)
|
||||
if result.N == 0 {
|
||||
return PerformanceMetrics{}, fmt.Errorf("benchmark %s failed to run", bench.Name)
|
||||
}
|
||||
|
||||
return PerformanceMetrics{
|
||||
NsPerOp: result.NsPerOp(),
|
||||
AllocsPerOp: result.AllocsPerOp(),
|
||||
BytesPerOp: result.AllocedBytesPerOp(),
|
||||
Timestamp: time.Now(),
|
||||
GoVersion: runtime.Version(),
|
||||
OS: runtime.GOOS,
|
||||
Arch: runtime.GOARCH,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// LoadBaselines loads performance baselines from file
|
||||
func (ps *PerformanceSuite) LoadBaselines() (map[string]PerformanceMetrics, error) {
|
||||
baselines := make(map[string]PerformanceMetrics)
|
||||
|
||||
if _, err := os.Stat(ps.BaselineFile); os.IsNotExist(err) {
|
||||
return baselines, nil
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(ps.BaselineFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read baselines: %w", err)
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(data, &baselines); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse baselines: %w", err)
|
||||
}
|
||||
|
||||
return baselines, nil
|
||||
}
|
||||
|
||||
// SaveBaselines saves performance baselines to file
|
||||
func (ps *PerformanceSuite) SaveBaselines(baselines map[string]PerformanceMetrics) error {
|
||||
data, err := json.MarshalIndent(baselines, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal baselines: %w", err)
|
||||
}
|
||||
|
||||
// #nosec G306 -- 0644 is appropriate for benchmark data files
|
||||
return os.WriteFile(ps.BaselineFile, data, 0644)
|
||||
}
|
||||
|
||||
// EstablishBaselines runs all benchmarks and saves them as baselines
|
||||
func (ps *PerformanceSuite) EstablishBaselines() error {
|
||||
fmt.Println("🔥 Establishing performance baselines...")
|
||||
baselines := make(map[string]PerformanceMetrics)
|
||||
|
||||
for _, bench := range ps.Benchmarks {
|
||||
fmt.Printf(" Running %s...", bench.Name)
|
||||
|
||||
metrics, err := ps.RunBenchmark(bench)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to run benchmark %s: %w", bench.Name, err)
|
||||
}
|
||||
|
||||
baselines[bench.Name] = metrics
|
||||
fmt.Printf(" ✓ %d ns/op, %d allocs/op\n", metrics.NsPerOp, metrics.AllocsPerOp)
|
||||
}
|
||||
|
||||
if err := ps.SaveBaselines(baselines); err != nil {
|
||||
return fmt.Errorf("failed to save baselines: %w", err)
|
||||
}
|
||||
|
||||
fmt.Printf("✅ Baselines established (%d benchmarks saved to %s)\n", len(baselines), ps.BaselineFile)
|
||||
return nil
|
||||
}
|
||||
|
||||
// CheckForRegressions runs benchmarks and compares against baselines.
// For each benchmark with a stored baseline it re-measures time, allocation
// count, and allocated bytes; any of the three exceeding the benchmark's
// RegressionLimit (percentage) counts as a regression. Results are printed,
// optionally written to ReportFile as JSON, and — when FailOnRegress is set —
// a non-nil error is returned if any regression was found.
func (ps *PerformanceSuite) CheckForRegressions() error {
	fmt.Println("🔍 Checking for performance regressions...")

	baselines, err := ps.LoadBaselines()
	if err != nil {
		return fmt.Errorf("failed to load baselines: %w", err)
	}

	// An empty map means no baseline file existed (or it was empty);
	// comparison is meaningless without a reference point.
	if len(baselines) == 0 {
		return fmt.Errorf("no baselines found - run EstablishBaselines() first")
	}

	var failures []string
	passed := 0
	total := 0
	results := make(map[string]string)

	for _, bench := range ps.Benchmarks {
		// Benchmarks added after the baselines were recorded are skipped,
		// not failed — they simply have nothing to compare against.
		baseline, exists := baselines[bench.Name]
		if !exists {
			fmt.Printf("⚠️ %s: No baseline found, skipping\n", bench.Name)
			continue
		}

		fmt.Printf(" %s...", bench.Name)

		current, err := ps.RunBenchmark(bench)
		if err != nil {
			return fmt.Errorf("failed to run benchmark %s: %w", bench.Name, err)
		}

		// Only benchmarks that actually ran count toward the totals.
		total++

		// Calculate percentage changes relative to the baseline
		// (positive means worse for all three metrics).
		timeChange := calculateChange(baseline.NsPerOp, current.NsPerOp)
		allocChange := calculateChange(baseline.AllocsPerOp, current.AllocsPerOp)
		memChange := calculateChange(baseline.BytesPerOp, current.BytesPerOp)

		// Check each metric against the same per-benchmark limit;
		// every breach is collected so the report names all of them.
		hasRegression := false
		var issues []string

		if timeChange > bench.RegressionLimit {
			hasRegression = true
			issues = append(issues, fmt.Sprintf("%.1f%% slower", timeChange))
		}

		if allocChange > bench.RegressionLimit {
			hasRegression = true
			issues = append(issues, fmt.Sprintf("%.1f%% more allocs", allocChange))
		}

		if memChange > bench.RegressionLimit {
			hasRegression = true
			issues = append(issues, fmt.Sprintf("%.1f%% more memory", memChange))
		}

		if hasRegression {
			status := fmt.Sprintf(" ❌ REGRESSION: %s", strings.Join(issues, ", "))
			failures = append(failures, fmt.Sprintf("%s: %s", bench.Name, strings.Join(issues, ", ")))
			fmt.Println(status)
			results[bench.Name] = "FAIL: " + strings.Join(issues, ", ")
		} else {
			status := " ✅ PASS"
			// Show the deltas even on a pass, unless all three are exactly zero.
			if timeChange != 0 || allocChange != 0 || memChange != 0 {
				status += fmt.Sprintf(" (%.1f%% time, %.1f%% allocs, %.1f%% mem)", timeChange, allocChange, memChange)
			}
			fmt.Println(status)
			passed++
			results[bench.Name] = "PASS"
		}
	}

	// Report summary
	fmt.Printf("\n📊 Performance regression check completed:\n")
	fmt.Printf(" • %d tests passed\n", passed)
	fmt.Printf(" • %d tests failed\n", len(failures))
	fmt.Printf(" • %d tests total\n", total)

	// Generate the report file (JSON) when reporting is enabled.
	if ps.EnableReports {
		summary := fmt.Sprintf("%d/%d tests passed.", passed, total)
		if len(failures) > 0 {
			summary = fmt.Sprintf("%d regressions detected.", len(failures))
		}

		report := RegressionReport{
			Summary:  summary,
			Failures: failures,
			Passed:   passed,
			Total:    total,
			Results:  results,
		}

		data, err := json.MarshalIndent(report, "", " ")
		if err != nil {
			return fmt.Errorf("failed to marshal report: %w", err)
		}
		// #nosec G306 -- 0644 is appropriate for benchmark report files
		if err := os.WriteFile(ps.ReportFile, data, 0644); err != nil {
			return fmt.Errorf("failed to write report file: %w", err)
		}
	}

	if len(failures) > 0 {
		fmt.Printf("\n❌ Performance regressions detected:\n")
		for _, failure := range failures {
			fmt.Printf(" • %s\n", failure)
		}

		// Only surface an error when the suite is configured to fail hard;
		// callers like the perf test set FailOnRegress = false to report only.
		if ps.FailOnRegress {
			return fmt.Errorf("performance regressions detected")
		}
	} else {
		fmt.Printf("\n✅ No performance regressions detected!\n")
	}

	return nil
}
|
||||
Reference in New Issue
Block a user