@@ -53,10 +53,10 @@ module CQL::Performance

     def to_h : Hash(String, JSON::Any)
       {
-        "total_queries" => JSON::Any.new(@total_queries),
-        "slow_queries" => JSON::Any.new(@slow_queries),
-        "very_slow_queries" => JSON::Any.new(@very_slow_queries),
-        "error_queries" => JSON::Any.new(@error_queries),
+        "total_queries"           => JSON::Any.new(@total_queries),
+        "slow_queries"            => JSON::Any.new(@slow_queries),
+        "very_slow_queries"       => JSON::Any.new(@very_slow_queries),
+        "error_queries"           => JSON::Any.new(@error_queries),
         "total_execution_time_ms" => JSON::Any.new(@total_execution_time.total_milliseconds),
         "avg_execution_time_ms"   => JSON::Any.new(@avg_execution_time.total_milliseconds),
         "min_execution_time_ms"   => JSON::Any.new(@min_execution_time == Time::Span::MAX ? 0.0 : @min_execution_time.total_milliseconds),
@@ -142,11 +142,11 @@ module CQL::Performance

     def to_h : Hash(String, JSON::Any)
       {
-        "uptime_seconds" => JSON::Any.new(@uptime.total_seconds),
-        "memory_usage_mb" => JSON::Any.new(@memory_usage_mb),
-        "cpu_usage_percent" => JSON::Any.new(@cpu_usage_percent),
-        "active_connections" => JSON::Any.new(@active_connections),
-        "max_connections" => JSON::Any.new(@max_connections),
+        "uptime_seconds"                       => JSON::Any.new(@uptime.total_seconds),
+        "memory_usage_mb"                      => JSON::Any.new(@memory_usage_mb),
+        "cpu_usage_percent"                    => JSON::Any.new(@cpu_usage_percent),
+        "active_connections"                   => JSON::Any.new(@active_connections),
+        "max_connections"                      => JSON::Any.new(@max_connections),
         "connection_pool_utilization_percent"  => JSON::Any.new(@connection_pool_utilization),
       }
     end
@@ -172,16 +172,16 @@ module CQL::Performance

     def to_h : Hash(String, JSON::Any)
       {
-        "overall_health_score" => JSON::Any.new(@overall_health_score),
-        "query_health_score" => JSON::Any.new(@query_health_score),
-        "n_plus_one_health_score" => JSON::Any.new(@n_plus_one_health_score),
-        "cache_health_score" => JSON::Any.new(@cache_health_score),
-        "system_health_score" => JSON::Any.new(@system_health_score),
-        "critical_issues" => JSON::Any.new(@critical_issues),
-        "high_issues" => JSON::Any.new(@high_issues),
-        "medium_issues" => JSON::Any.new(@medium_issues),
-        "low_issues" => JSON::Any.new(@low_issues),
-        "total_issues" => JSON::Any.new(@total_issues),
+        "overall_health_score"    => JSON::Any.new(@overall_health_score),
+        "query_health_score"      => JSON::Any.new(@query_health_score),
+        "n_plus_one_health_score" => JSON::Any.new(@n_plus_one_health_score),
+        "cache_health_score"      => JSON::Any.new(@cache_health_score),
+        "system_health_score"     => JSON::Any.new(@system_health_score),
+        "critical_issues"         => JSON::Any.new(@critical_issues),
+        "high_issues"             => JSON::Any.new(@high_issues),
+        "medium_issues"           => JSON::Any.new(@medium_issues),
+        "low_issues"              => JSON::Any.new(@low_issues),
+        "total_issues"            => JSON::Any.new(@total_issues),
       }
     end
   end
@@ -198,10 +198,10 @@ module CQL::Performance

     def to_h : Hash(String, JSON::Any)
       {
-        "slowest_queries" => JSON::Any.new(@slowest_queries.map { |q| PerformanceMetrics.to_json_any(q.to_h) }),
-        "most_frequent_queries" => JSON::Any.new(@most_frequent_queries.map { |q| PerformanceMetrics.to_json_any(q) }),
-        "highest_error_queries" => JSON::Any.new(@highest_error_queries.map { |q| PerformanceMetrics.to_json_any(q) }),
-        "most_expensive_queries" => JSON::Any.new(@most_expensive_queries.map { |q| PerformanceMetrics.to_json_any(q) }),
+        "slowest_queries"        => JSON::Any.new(@slowest_queries.map { |query| PerformanceMetrics.to_json_any(query.to_h) }),
+        "most_frequent_queries"  => JSON::Any.new(@most_frequent_queries.map { |query| PerformanceMetrics.to_json_any(query) }),
+        "highest_error_queries"  => JSON::Any.new(@highest_error_queries.map { |query| PerformanceMetrics.to_json_any(query) }),
+        "most_expensive_queries" => JSON::Any.new(@most_expensive_queries.map { |query| PerformanceMetrics.to_json_any(query) }),
       }
     end
   end
@@ -218,10 +218,10 @@ module CQL::Performance

     def to_h : Hash(String, JSON::Any)
       {
-        "n_plus_one_patterns" => JSON::Any.new(@n_plus_one_patterns.map { |p| PerformanceMetrics.to_json_any(p.to_h) }),
-        "query_patterns" => JSON::Any.new(@query_patterns.map { |p| PerformanceMetrics.to_json_any(p) }),
-        "time_distribution" => PerformanceMetrics.to_json_any(@time_distribution),
-        "error_patterns" => JSON::Any.new(@error_patterns.map { |p| PerformanceMetrics.to_json_any(p) }),
+        "n_plus_one_patterns" => JSON::Any.new(@n_plus_one_patterns.map { |pattern| PerformanceMetrics.to_json_any(pattern.to_h) }),
+        "query_patterns"      => JSON::Any.new(@query_patterns.map { |pattern| PerformanceMetrics.to_json_any(pattern) }),
+        "time_distribution"   => PerformanceMetrics.to_json_any(@time_distribution),
+        "error_patterns"      => JSON::Any.new(@error_patterns.map { |pattern| PerformanceMetrics.to_json_any(pattern) }),
       }
     end
   end
@@ -247,7 +247,7 @@ module CQL::Performance
       @top_queries : TopQueries,
       @patterns : PerformancePatterns,
       @issues : Array(Issue),
-      @collection_duration : Time::Span = Time::Span.zero
+      @collection_duration : Time::Span = Time::Span.zero,
     )
     end

@@ -257,7 +257,7 @@ module CQL::Performance
       detector : NPlusOneDetectorInterface? = nil,
       cache : Cache? = nil,
       start_time : Time? = nil,
-      config : Config? = nil
+      config : Config? = nil,
     ) : self
       start_time ||= Time.utc
       collection_duration = Time.utc - start_time
@@ -302,16 +302,16 @@ module CQL::Performance
     # Export all metrics as a comprehensive hash
     def to_h : Hash(String, JSON::Any)
       {
-        "timestamp" => JSON::Any.new(@timestamp.to_rfc3339),
+        "timestamp"                   => JSON::Any.new(@timestamp.to_rfc3339),
         "collection_duration_seconds" => JSON::Any.new(@collection_duration.total_seconds),
-        "query_metrics" => JSON::Any.new(@query_metrics.to_h),
-        "n_plus_one_metrics" => JSON::Any.new(@n_plus_one_metrics.to_h),
-        "cache_metrics" => JSON::Any.new(@cache_metrics.to_h),
-        "system_metrics" => JSON::Any.new(@system_metrics.to_h),
-        "health_metrics" => JSON::Any.new(@health_metrics.to_h),
-        "top_queries" => JSON::Any.new(@top_queries.to_h),
-        "patterns" => JSON::Any.new(@patterns.to_h),
-        "issues" => JSON::Any.new(@issues.map { |i| PerformanceMetrics.to_json_any(i.to_h) }),
+        "query_metrics"               => JSON::Any.new(@query_metrics.to_h),
+        "n_plus_one_metrics"          => JSON::Any.new(@n_plus_one_metrics.to_h),
+        "cache_metrics"               => JSON::Any.new(@cache_metrics.to_h),
+        "system_metrics"              => JSON::Any.new(@system_metrics.to_h),
+        "health_metrics"              => JSON::Any.new(@health_metrics.to_h),
+        "top_queries"                 => JSON::Any.new(@top_queries.to_h),
+        "patterns"                    => JSON::Any.new(@patterns.to_h),
+        "issues"                      => JSON::Any.new(@issues.map { |i| PerformanceMetrics.to_json_any(i.to_h) }),
       }
     end

@@ -323,14 +323,14 @@ module CQL::Performance
     # Get summary metrics for quick overview
     def summary : Hash(String, String | Int32 | Int64 | Float64)
       {
-        "total_queries" => @query_metrics.total_queries,
-        "slow_queries" => @query_metrics.slow_queries,
-        "error_rate_percent" => @query_metrics.error_rate,
-        "avg_query_time_ms" => @query_metrics.avg_execution_time.total_milliseconds,
+        "total_queries"       => @query_metrics.total_queries,
+        "slow_queries"        => @query_metrics.slow_queries,
+        "error_rate_percent"  => @query_metrics.error_rate,
+        "avg_query_time_ms"   => @query_metrics.avg_execution_time.total_milliseconds,
         "n_plus_one_patterns" => @n_plus_one_metrics.total_patterns,
-        "health_score" => @health_metrics.overall_health_score.to_s,
-        "total_issues" => @health_metrics.total_issues,
-        "uptime_seconds" => @system_metrics.uptime.total_seconds,
+        "health_score"        => @health_metrics.overall_health_score.to_s,
+        "total_issues"        => @health_metrics.total_issues,
+        "uptime_seconds"      => @system_metrics.uptime.total_seconds,
       }
     end

@@ -400,7 +400,7 @@ module CQL::Performance

       # Count slow queries
       slow_queries = profiler.slowest_queries(1000).size.to_i64
-      very_slow_queries = profiler.slowest_queries(1000).select { |q| q.execution_time > 1.second }.size.to_i64
+      very_slow_queries = profiler.slowest_queries(1000).count { |query| query.execution_time > 1.second }

       # Calculate rates
       error_rate = 0.0 # TODO: Track errors properly
@@ -434,10 +434,10 @@ module CQL::Performance
       max_repetitions = patterns.max_of?(&.repetition_count) || 0

       # Count by severity
-      critical_patterns = patterns.count { |p| p.repetition_count > 50 }
-      high_patterns = patterns.count { |p| p.repetition_count > 20 && p.repetition_count <= 50 }
-      medium_patterns = patterns.count { |p| p.repetition_count > 5 && p.repetition_count <= 20 }
-      low_patterns = patterns.count { |p| p.repetition_count > 2 && p.repetition_count <= 5 }
+      critical_patterns = patterns.count { |pattern| pattern.repetition_count > 50 }
+      high_patterns = patterns.count { |pattern| pattern.repetition_count > 20 && pattern.repetition_count <= 50 }
+      medium_patterns = patterns.count { |pattern| pattern.repetition_count > 5 && pattern.repetition_count <= 20 }
+      low_patterns = patterns.count { |pattern| pattern.repetition_count > 2 && pattern.repetition_count <= 5 }

       # Detection rate (placeholder)
       detection_rate = 0.0
@@ -465,10 +465,10 @@ module CQL::Performance
        0_i64,      # cache_hits
        0_i64,      # cache_misses
        cache.size,
-       1000, # max_cache_size
-       0.0, # hit_rate
-       0.0, # miss_rate
-       0_i64 # evictions
+       1000,       # max_cache_size
+       0.0,        # hit_rate
+       0.0,        # miss_rate
+       0_i64       # evictions
      )
    end

@@ -478,11 +478,11 @@ module CQL::Performance
       # Placeholder values - would need system monitoring
       SystemMetrics.new(
         uptime,
-        0.0, # memory_usage_mb
-        0.0, # cpu_usage_percent
-        0, # active_connections
-        100, # max_connections
-        0.0 # connection_pool_utilization
+        0.0,  # memory_usage_mb
+        0.0,  # cpu_usage_percent
+        0,    # active_connections
+        100,  # max_connections
+        0.0   # connection_pool_utilization
       )
     end

@@ -506,7 +506,7 @@ module CQL::Performance
         query_health_score,
         n_plus_one_health_score,
         cache_health_score,
-        system_health_score
+        system_health_score,
       ].sum / 4).to_i

       HealthMetrics.new(
@@ -540,23 +540,23 @@ module CQL::Performance
       stats = profiler.statistics
       most_frequent = stats.map do |sql, stat|
         {
-          sql: sql,
-          count: stat[:count].to_i64,
-          avg_time: stat[:avg_ms].milliseconds
+          sql:      sql,
+          count:    stat[:count].to_i64,
+          avg_time: stat[:avg_ms].milliseconds,
         }
-      end.sort_by(&.[:count]).reverse!.first(10)
+      end.sort_by!(&.[:count]).reverse!.first(10)

       # Highest error queries (placeholder)
       highest_error_queries = [] of NamedTuple(sql: String, errors: Int64, error_rate: Float64)

       # Most expensive queries (total time)
       most_expensive = stats.map do |sql, stat|
         {
-          sql: sql,
+          sql:        sql,
           total_time: stat[:total_ms].milliseconds,
-          count: stat[:count].to_i64
+          count:      stat[:count].to_i64,
         }
-      end.sort_by(&.[:total_time]).reverse!.first(10)
+      end.sort_by!(&.[:total_time]).reverse!.first(10)

       TopQueries.new(slowest_queries, most_frequent, highest_error_queries, most_expensive)
     end
@@ -571,18 +571,18 @@ module CQL::Performance
         stats = profiler.statistics
         query_patterns = stats.map do |sql, stat|
           {
-            pattern: sql,
-            count: stat[:count].to_i64,
-            avg_time: stat[:avg_ms].milliseconds
+            pattern:  sql,
+            count:    stat[:count].to_i64,
+            avg_time: stat[:avg_ms].milliseconds,
           }
-        end.sort_by(&.[:count]).reverse!.first(20)
+        end.sort_by!(&.[:count]).reverse!.first(20)
       end

       # Time distribution
       time_distribution = {
-        "fast" => 0_i64,
-        "slow" => 0_i64,
-        "very_slow" => 0_i64
+        "fast"      => 0_i64,
+        "slow"      => 0_i64,
+        "very_slow" => 0_i64,
       }

       # Error patterns (placeholder)
@@ -606,7 +606,7 @@ module CQL::Performance

       total_queries = stats.values.sum(&.[:count])
       slow_queries = profiler.slowest_queries(1000).size
-      very_slow_queries = profiler.slowest_queries(1000).select { |q| q.execution_time > 1.second }.size
+      very_slow_queries = profiler.slowest_queries(1000).count { |query| query.execution_time > 1.second }

       score = 100
       score -= (slow_queries.to_f / total_queries * 20).to_i if total_queries > 0
@@ -620,8 +620,8 @@ module CQL::Performance
       patterns = detector.patterns
       return 100 if patterns.empty?

-      critical_patterns = patterns.count { |p| p.repetition_count > 50 }
-      high_patterns = patterns.count { |p| p.repetition_count > 20 }
+      critical_patterns = patterns.count { |pattern| pattern.repetition_count > 50 }
+      high_patterns = patterns.count { |pattern| pattern.repetition_count > 20 }

       score = 100
       score -= critical_patterns * 30
@@ -641,13 +641,13 @@ module CQL::Performance
   struct QueryData
     def to_h : Hash(String, JSON::Any)
       {
-        "sql" => JSON::Any.new(@sql),
-        "params" => PerformanceMetrics.to_json_any(@params),
+        "sql"               => JSON::Any.new(@sql),
+        "params"            => PerformanceMetrics.to_json_any(@params),
         "execution_time_ms" => JSON::Any.new(@execution_time.total_milliseconds),
-        "timestamp" => JSON::Any.new(@timestamp.to_rfc3339),
-        "rows_affected" => JSON::Any.new(@rows_affected || 0_i64),
-        "error" => JSON::Any.new(@error || ""),
-        "normalized_sql" => JSON::Any.new(normalized_sql),
+        "timestamp"         => JSON::Any.new(@timestamp.to_rfc3339),
+        "rows_affected"     => JSON::Any.new(@rows_affected || 0_i64),
+        "error"             => JSON::Any.new(@error || ""),
+        "normalized_sql"    => JSON::Any.new(normalized_sql),
       }
     end
   end
@@ -656,10 +656,10 @@ module CQL::Performance
   struct NPlusOnePattern
     def to_h : Hash(String, JSON::Any)
       {
-        "parent_query" => JSON::Any.new(@parent_query),
-        "repeated_query" => JSON::Any.new(@repeated_query),
+        "parent_query"     => JSON::Any.new(@parent_query),
+        "repeated_query"   => JSON::Any.new(@repeated_query),
         "repetition_count" => JSON::Any.new(@repetition_count),
-        "timestamp" => JSON::Any.new(@timestamp.to_rfc3339),
+        "timestamp"        => JSON::Any.new(@timestamp.to_rfc3339),
       }
     end
   end
@@ -668,10 +668,10 @@ module CQL::Performance
   struct Issue
     def to_h : Hash(String, JSON::Any)
       {
-        "type" => JSON::Any.new(@type.to_s),
-        "severity" => JSON::Any.new(@severity.to_s),
-        "message" => JSON::Any.new(@message),
-        "details" => PerformanceMetrics.to_json_any(@details),
+        "type"      => JSON::Any.new(@type.to_s),
+        "severity"  => JSON::Any.new(@severity.to_s),
+        "message"   => JSON::Any.new(@message),
+        "details"   => PerformanceMetrics.to_json_any(@details),
         "timestamp" => JSON::Any.new(@timestamp.to_rfc3339),
       }
     end
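
For reference, a minimal usage sketch (not part of this change; the hash literal and values below are invented for illustration): each to_h above returns a Hash(String, JSON::Any), so it serializes directly with Crystal's standard library, and JSON::Any.new wraps nested hashes the same way the to_h methods do.

require "json"

# Hypothetical hash shaped like a fragment of QueryMetrics#to_h output
metrics = {
  "total_queries" => JSON::Any.new(42_i64),
  "slow_queries"  => JSON::Any.new(3_i64),
}

puts metrics.to_json                # => {"total_queries":42,"slow_queries":3}
puts JSON::Any.new(metrics).to_json # same output, via the JSON::Any wrapper used for nested metrics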