dukcapil/internal/repository/analytics_repository.go
Aditya Siregar aa662a321f Update
2025-09-01 12:06:14 +07:00

752 lines
25 KiB
Go

package repository
import (
"context"
"fmt"
"time"
"github.com/google/uuid"
"gorm.io/gorm"
)
// AnalyticsRepository aggregates reporting/statistics queries over the
// letter, approval, department, institution, and pre-computed summary tables.
// All methods resolve their *gorm.DB via DBFromContext, so they participate
// in any transaction carried by the context.
type AnalyticsRepository struct {
db *gorm.DB // fallback GORM handle used when the context carries no transaction
}
// NewAnalyticsRepository constructs an AnalyticsRepository backed by the
// given GORM database handle.
func NewAnalyticsRepository(db *gorm.DB) *AnalyticsRepository {
	repo := &AnalyticsRepository{db: db}
	return repo
}
// GetLetterSummaryStats gets overall summary statistics using summary tables for better performance
func (r *AnalyticsRepository) GetLetterSummaryStats(ctx context.Context, startDate, endDate time.Time, userID, departmentID *uuid.UUID) (map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
stats := make(map[string]interface{})
// Use summary tables for better performance when possible
if userID == nil && departmentID != nil {
// Use department_letter_summary for department-specific stats
query := db.Table("department_letter_summary").
Where("department_id = ?", *departmentID)
if !startDate.IsZero() {
query = query.Where("summary_date >= ?", startDate)
}
if !endDate.IsZero() {
query = query.Where("summary_date <= ?", endDate)
}
var result struct {
TotalIncoming int64 `gorm:"column:total_incoming"`
TotalOutgoing int64 `gorm:"column:total_outgoing"`
PendingOutgoing int64 `gorm:"column:pending_outgoing"`
ApprovedOutgoing int64 `gorm:"column:approved_outgoing"`
RejectedOutgoing int64 `gorm:"column:rejected_outgoing"`
AvgResponseHours float64 `gorm:"column:avg_response_hours"`
CompletionRate float64 `gorm:"column:completion_rate"`
}
query.Select(`
COALESCE(SUM(incoming_count), 0) as total_incoming,
COALESCE(SUM(outgoing_count), 0) as total_outgoing,
COALESCE(SUM(pending_outgoing), 0) as pending_outgoing,
COALESCE(SUM(approved_outgoing), 0) as approved_outgoing,
COALESCE(SUM(rejected_outgoing), 0) as rejected_outgoing,
COALESCE(AVG(avg_response_hours), 0) as avg_response_hours,
COALESCE(AVG(completion_rate), 0) as completion_rate
`).Scan(&result)
stats["total_incoming"] = result.TotalIncoming
stats["total_outgoing"] = result.TotalOutgoing
stats["total_pending"] = result.PendingOutgoing
stats["total_approved"] = result.ApprovedOutgoing
stats["total_rejected"] = result.RejectedOutgoing
stats["total_archived"] = int64(0) // Calculate separately if needed
stats["avg_processing_time"] = result.AvgResponseHours
stats["completion_rate"] = result.CompletionRate
} else if userID == nil && departmentID == nil {
// Use letter_summary for overall stats
query := db.Table("letter_summary")
if !startDate.IsZero() {
query = query.Where("summary_date >= ?", startDate)
}
if !endDate.IsZero() {
query = query.Where("summary_date <= ?", endDate)
}
var result struct {
TotalIncoming int64 `gorm:"column:total_incoming"`
TotalOutgoing int64 `gorm:"column:total_outgoing"`
TotalPending int64 `gorm:"column:total_pending"`
TotalApproved int64 `gorm:"column:total_approved"`
TotalRejected int64 `gorm:"column:total_rejected"`
TotalArchived int64 `gorm:"column:total_archived"`
TotalSent int64 `gorm:"column:total_sent"`
AvgProcessing float64 `gorm:"column:avg_processing"`
}
query.Select(`
COALESCE(SUM(CASE WHEN letter_type = 'incoming' THEN total_count ELSE 0 END), 0) as total_incoming,
COALESCE(SUM(CASE WHEN letter_type = 'outgoing' THEN total_count ELSE 0 END), 0) as total_outgoing,
COALESCE(SUM(pending_count), 0) as total_pending,
COALESCE(SUM(approved_count), 0) as total_approved,
COALESCE(SUM(rejected_count), 0) as total_rejected,
COALESCE(SUM(archived_count), 0) as total_archived,
COALESCE(SUM(sent_count), 0) as total_sent,
COALESCE(AVG(avg_processing_hours), 0) as avg_processing
`).Scan(&result)
stats["total_incoming"] = result.TotalIncoming
stats["total_outgoing"] = result.TotalOutgoing
stats["total_pending"] = result.TotalPending
stats["total_approved"] = result.TotalApproved
stats["total_rejected"] = result.TotalRejected
stats["total_archived"] = result.TotalArchived
stats["avg_processing_time"] = result.AvgProcessing
// Calculate completion rate
completionRate := float64(0)
if result.TotalOutgoing > 0 {
completedCount := result.TotalSent + result.TotalArchived
completionRate = float64(completedCount) / float64(result.TotalOutgoing) * 100
}
stats["completion_rate"] = completionRate
} else {
// Fall back to original implementation for user-specific queries
// Base query builders
incomingQuery := db.Table("letters_incoming").Where("letters_incoming.deleted_at IS NULL")
outgoingQuery := db.Table("letters_outgoing").Where("letters_outgoing.deleted_at IS NULL")
// Apply date filters
if !startDate.IsZero() {
incomingQuery = incomingQuery.Where("letters_incoming.created_at >= ?", startDate)
outgoingQuery = outgoingQuery.Where("letters_outgoing.created_at >= ?", startDate)
}
if !endDate.IsZero() {
incomingQuery = incomingQuery.Where("letters_incoming.created_at <= ?", endDate)
outgoingQuery = outgoingQuery.Where("letters_outgoing.created_at <= ?", endDate)
}
// Apply user/department filters for outgoing letters
if userID != nil {
outgoingQuery = outgoingQuery.
Joins("LEFT JOIN letter_outgoing_recipients ON letter_outgoing_recipients.letter_id = letters_outgoing.id").
Where("letter_outgoing_recipients.user_id = ?", *userID)
}
// Count incoming letters
var totalIncoming int64
incomingQuery.Count(&totalIncoming)
stats["total_incoming"] = totalIncoming
// Count outgoing letters
var totalOutgoing int64
outgoingQuery.Count(&totalOutgoing)
stats["total_outgoing"] = totalOutgoing
// Count by status - need to clone query for each count
var pendingCount, approvedCount, rejectedCount, archivedCount int64
db.Table("letters_outgoing").Where("letters_outgoing.deleted_at IS NULL").
Where("letters_outgoing.status = ?", "pending_approval").
Joins("LEFT JOIN letter_outgoing_recipients ON letter_outgoing_recipients.letter_id = letters_outgoing.id").
Where("letter_outgoing_recipients.user_id = ?", *userID).
Count(&pendingCount)
db.Table("letters_outgoing").Where("letters_outgoing.deleted_at IS NULL").
Where("letters_outgoing.status = ?", "approved").
Joins("LEFT JOIN letter_outgoing_recipients ON letter_outgoing_recipients.letter_id = letters_outgoing.id").
Where("letter_outgoing_recipients.user_id = ?", *userID).
Count(&approvedCount)
db.Table("letters_outgoing").Where("letters_outgoing.deleted_at IS NULL").
Where("letters_outgoing.status = ?", "rejected").
Joins("LEFT JOIN letter_outgoing_recipients ON letter_outgoing_recipients.letter_id = letters_outgoing.id").
Where("letter_outgoing_recipients.user_id = ?", *userID).
Count(&rejectedCount)
db.Table("letters_outgoing").Where("letters_outgoing.deleted_at IS NULL").
Where("letters_outgoing.status = ?", "archived").
Joins("LEFT JOIN letter_outgoing_recipients ON letter_outgoing_recipients.letter_id = letters_outgoing.id").
Where("letter_outgoing_recipients.user_id = ?", *userID).
Count(&archivedCount)
stats["total_pending"] = pendingCount
stats["total_approved"] = approvedCount
stats["total_rejected"] = rejectedCount
stats["total_archived"] = archivedCount
// Calculate average processing time
var avgProcessingTime float64
db.Table("letters_outgoing").
Select("AVG(EXTRACT(EPOCH FROM (letters_outgoing.updated_at - letters_outgoing.created_at))/3600) as avg_hours").
Where("letters_outgoing.status IN ('approved', 'sent', 'archived')").
Where("letters_outgoing.deleted_at IS NULL").
Scan(&avgProcessingTime)
stats["avg_processing_time"] = avgProcessingTime
// Calculate completion rate
var completedCount int64
db.Table("letters_outgoing").Where("letters_outgoing.deleted_at IS NULL").
Where("letters_outgoing.status IN ('sent', 'archived')").
Joins("LEFT JOIN letter_outgoing_recipients ON letter_outgoing_recipients.letter_id = letters_outgoing.id").
Where("letter_outgoing_recipients.user_id = ?", *userID).
Count(&completedCount)
completionRate := float64(0)
if totalOutgoing > 0 {
completionRate = float64(completedCount) / float64(totalOutgoing) * 100
}
stats["completion_rate"] = completionRate
}
return stats, nil
}
// GetStatusDistribution gets letter distribution by status
// GetStatusDistribution gets letter distribution by status.
//
// Incoming and outgoing letters are combined; each row carries status, type
// ('incoming'/'outgoing'), count, and the percentage within its type.
// Security fix: startDate/endDate and userID are now bound as SQL
// placeholders rather than spliced into the query text with Sprintf.
func (r *AnalyticsRepository) GetStatusDistribution(ctx context.Context, startDate, endDate time.Time, userID *uuid.UUID) ([]map[string]interface{}, error) {
	db := DBFromContext(ctx, r.db)
	var results []map[string]interface{}
	query := `
		WITH combined_letters AS (
			SELECT
				status,
				'incoming' as type,
				COUNT(*) as count
			FROM letters_incoming
			WHERE deleted_at IS NULL
			%s
			GROUP BY status
			UNION ALL
			SELECT
				lo.status,
				'outgoing' as type,
				COUNT(DISTINCT lo.id) as count
			FROM letters_outgoing lo
			%s
			WHERE lo.deleted_at IS NULL
			%s
			GROUP BY lo.status
		)
		SELECT
			status,
			type,
			count,
			ROUND(count * 100.0 / SUM(count) OVER (PARTITION BY type), 2) as percentage
		FROM combined_letters
		ORDER BY type, count DESC
	`
	// Collect filters and args separately for the incoming and outgoing
	// halves; arg order must match placeholder order in the final SQL
	// (incoming filters first, then outgoing filters, then the user filter).
	var incomingFilter, outgoingFilter string
	var incomingArgs, outgoingArgs []interface{}
	if !startDate.IsZero() {
		d := startDate.Format("2006-01-02")
		incomingFilter += " AND created_at >= ?"
		incomingArgs = append(incomingArgs, d)
		outgoingFilter += " AND lo.created_at >= ?"
		outgoingArgs = append(outgoingArgs, d)
	}
	if !endDate.IsZero() {
		d := endDate.Format("2006-01-02")
		incomingFilter += " AND created_at <= ?"
		incomingArgs = append(incomingArgs, d)
		outgoingFilter += " AND lo.created_at <= ?"
		outgoingArgs = append(outgoingArgs, d)
	}
	joinClause := ""
	if userID != nil {
		joinClause = "LEFT JOIN letter_outgoing_recipients lor ON lor.letter_id = lo.id"
		outgoingFilter += " AND lor.user_id = ?"
		outgoingArgs = append(outgoingArgs, userID.String())
	}
	query = fmt.Sprintf(query, incomingFilter, joinClause, outgoingFilter)
	args := append(incomingArgs, outgoingArgs...)
	if err := db.Raw(query, args...).Scan(&results).Error; err != nil {
		return nil, err
	}
	return results, nil
}
// GetPriorityDistribution gets letter distribution by priority
func (r *AnalyticsRepository) GetPriorityDistribution(ctx context.Context, startDate, endDate time.Time) ([]map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
var results []map[string]interface{}
query := `
SELECT
p.id as priority_id,
p.name as priority_name,
p.level,
COUNT(lo.id) as count,
ROUND(COUNT(lo.id) * 100.0 / SUM(COUNT(lo.id)) OVER (), 2) as percentage,
AVG(EXTRACT(EPOCH FROM (lo.updated_at - lo.created_at))/3600) as avg_response_time
FROM priorities p
LEFT JOIN letters_outgoing lo ON lo.priority_id = p.id AND lo.deleted_at IS NULL
WHERE 1=1
%s
GROUP BY p.id, p.name, p.level
ORDER BY p.level ASC
`
dateFilter := ""
if !startDate.IsZero() {
dateFilter += fmt.Sprintf(" AND lo.created_at >= '%s'", startDate.Format("2006-01-02"))
}
if !endDate.IsZero() {
dateFilter += fmt.Sprintf(" AND lo.created_at <= '%s'", endDate.Format("2006-01-02"))
}
query = fmt.Sprintf(query, dateFilter)
if err := db.Raw(query).Scan(&results).Error; err != nil {
return nil, err
}
return results, nil
}
// GetDepartmentStats gets statistics per department using summary tables
func (r *AnalyticsRepository) GetDepartmentStats(ctx context.Context, startDate, endDate time.Time) ([]map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
var results []map[string]interface{}
// First try using summary table for better performance
query := `
SELECT
d.id as department_id,
d.name as department_name,
d.code as department_code,
COALESCE(SUM(dls.incoming_count), 0) as incoming_count,
COALESCE(SUM(dls.outgoing_count), 0) as outgoing_count,
COALESCE(SUM(dls.pending_outgoing), 0) as pending_count,
COALESCE(AVG(dls.avg_response_hours), 0) as avg_response_time,
COALESCE(AVG(dls.completion_rate), 0) as completion_rate
FROM departments d
LEFT JOIN department_letter_summary dls ON dls.department_id = d.id
WHERE 1=1
%s
GROUP BY d.id, d.name, d.code
ORDER BY (COALESCE(SUM(dls.incoming_count), 0) + COALESCE(SUM(dls.outgoing_count), 0)) DESC
`
dateFilter := ""
if !startDate.IsZero() {
dateFilter += fmt.Sprintf(" AND dls.summary_date >= '%s'", startDate.Format("2006-01-02"))
}
if !endDate.IsZero() {
dateFilter += fmt.Sprintf(" AND dls.summary_date <= '%s'", endDate.Format("2006-01-02"))
}
query = fmt.Sprintf(query, dateFilter)
if err := db.Raw(query).Scan(&results).Error; err != nil {
return nil, err
}
// If no results from summary table, fall back to direct query
if len(results) == 0 {
fallbackQuery := `
SELECT
d.id as department_id,
d.name as department_name,
d.code as department_code,
COUNT(DISTINCT lir.letter_id) as incoming_count,
COUNT(DISTINCT lor.letter_id) as outgoing_count,
COUNT(DISTINCT CASE WHEN lo.status = 'pending_approval' THEN lo.id END) as pending_count,
COALESCE(AVG(CASE
WHEN lo.status IN ('approved', 'sent', 'archived')
THEN EXTRACT(EPOCH FROM (lo.updated_at - lo.created_at))/3600
END), 0) as avg_response_time,
CASE
WHEN COUNT(DISTINCT lo.id) > 0
THEN ROUND(COUNT(DISTINCT CASE WHEN lo.status IN ('sent', 'archived') THEN lo.id END) * 100.0 / COUNT(DISTINCT lo.id), 2)
ELSE 0
END as completion_rate
FROM departments d
LEFT JOIN letter_incoming_recipients lir ON lir.recipient_department_id = d.id
LEFT JOIN letter_outgoing_recipients lor ON lor.department_id = d.id
LEFT JOIN letters_outgoing lo ON lo.id = lor.letter_id AND lo.deleted_at IS NULL
WHERE 1=1
%s
GROUP BY d.id, d.name, d.code
ORDER BY (COUNT(DISTINCT lir.letter_id) + COUNT(DISTINCT lor.letter_id)) DESC
`
fallbackDateFilter := ""
if !startDate.IsZero() {
fallbackDateFilter += fmt.Sprintf(" AND (lo.created_at >= '%s' OR lo.created_at IS NULL)", startDate.Format("2006-01-02"))
}
if !endDate.IsZero() {
fallbackDateFilter += fmt.Sprintf(" AND (lo.created_at <= '%s' OR lo.created_at IS NULL)", endDate.Format("2006-01-02"))
}
fallbackQuery = fmt.Sprintf(fallbackQuery, fallbackDateFilter)
if err := db.Raw(fallbackQuery).Scan(&results).Error; err != nil {
return nil, err
}
}
return results, nil
}
// GetMonthlyTrend gets monthly trend data using summary tables for better performance
func (r *AnalyticsRepository) GetMonthlyTrend(ctx context.Context, months int) ([]map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
var results []map[string]interface{}
// Use summary table for better performance
query := `
WITH monthly_aggregated AS (
SELECT
TO_CHAR(summary_date, 'Month') as month,
EXTRACT(YEAR FROM summary_date) as year,
EXTRACT(MONTH FROM summary_date) as month_num,
SUM(CASE WHEN letter_type = 'incoming' THEN total_count ELSE 0 END) as incoming_count,
SUM(CASE WHEN letter_type = 'outgoing' THEN total_count ELSE 0 END) as outgoing_count,
SUM(total_count) as total_count
FROM letter_summary
WHERE summary_date >= NOW() - INTERVAL '%d months'
GROUP BY TO_CHAR(summary_date, 'Month'),
EXTRACT(YEAR FROM summary_date),
EXTRACT(MONTH FROM summary_date)
)
SELECT
month,
year,
incoming_count,
outgoing_count,
total_count,
LAG(total_count) OVER (ORDER BY year, month_num) as prev_total
FROM monthly_aggregated
ORDER BY year DESC, month_num DESC
LIMIT %d
`
query = fmt.Sprintf(query, months, months)
if err := db.Raw(query).Scan(&results).Error; err != nil {
// If summary table is empty, fall back to direct query
if len(results) == 0 {
fallbackQuery := `
WITH monthly_data AS (
SELECT
TO_CHAR(date_trunc('month', created_at), 'Month') as month,
EXTRACT(YEAR FROM created_at) as year,
EXTRACT(MONTH FROM created_at) as month_num,
COUNT(*) as incoming_count,
0 as outgoing_count
FROM letters_incoming
WHERE deleted_at IS NULL
AND created_at >= NOW() - INTERVAL '%d months'
GROUP BY date_trunc('month', created_at), EXTRACT(YEAR FROM created_at), EXTRACT(MONTH FROM created_at)
UNION ALL
SELECT
TO_CHAR(date_trunc('month', created_at), 'Month') as month,
EXTRACT(YEAR FROM created_at) as year,
EXTRACT(MONTH FROM created_at) as month_num,
0 as incoming_count,
COUNT(*) as outgoing_count
FROM letters_outgoing
WHERE deleted_at IS NULL
AND created_at >= NOW() - INTERVAL '%d months'
GROUP BY date_trunc('month', created_at), EXTRACT(YEAR FROM created_at), EXTRACT(MONTH FROM created_at)
)
SELECT
month,
year,
SUM(incoming_count) as incoming_count,
SUM(outgoing_count) as outgoing_count,
SUM(incoming_count + outgoing_count) as total_count,
LAG(SUM(incoming_count + outgoing_count)) OVER (ORDER BY year, month_num) as prev_total
FROM monthly_data
GROUP BY month, year, month_num
ORDER BY year DESC, month_num DESC
LIMIT %d
`
fallbackQuery = fmt.Sprintf(fallbackQuery, months, months, months)
if err := db.Raw(fallbackQuery).Scan(&results).Error; err != nil {
return nil, err
}
}
}
// Calculate growth rate
for i := range results {
if results[i]["prev_total"] != nil {
prevVal, ok := results[i]["prev_total"].(float64)
if ok && prevVal > 0 {
current := getFloat64FromInterface(results[i]["total_count"])
results[i]["growth_rate"] = ((current - prevVal) / prevVal) * 100
} else {
results[i]["growth_rate"] = float64(0)
}
} else {
results[i]["growth_rate"] = float64(0)
}
delete(results[i], "prev_total")
}
return results, nil
}
// Helper function to safely convert interface{} to float64
func getFloat64FromInterface(v interface{}) float64 {
if v == nil {
return 0
}
switch val := v.(type) {
case float64:
return val
case int64:
return float64(val)
case int:
return float64(val)
default:
return 0
}
}
// GetTopSenders gets top letter senders
func (r *AnalyticsRepository) GetTopSenders(ctx context.Context, limit int, startDate, endDate time.Time) ([]map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
var results []map[string]interface{}
query := `
SELECT
u.id as user_id,
u.name as user_name,
u.email as user_email,
COALESCE(d.name, 'No Department') as department,
COUNT(lo.id) as letter_count,
AVG(EXTRACT(EPOCH FROM (lo.updated_at - lo.created_at))/3600) as avg_response_time
FROM users u
LEFT JOIN letters_outgoing lo ON lo.created_by = u.id
LEFT JOIN user_department ud ON ud.user_id = u.id
LEFT JOIN departments d ON d.id = ud.department_id
WHERE lo.deleted_at IS NULL
%s
GROUP BY u.id, u.name, u.email, d.name
ORDER BY letter_count DESC
LIMIT %d
`
dateFilter := ""
if !startDate.IsZero() {
dateFilter += fmt.Sprintf(" AND lo.created_at >= '%s'", startDate.Format("2006-01-02"))
}
if !endDate.IsZero() {
dateFilter += fmt.Sprintf(" AND lo.created_at <= '%s'", endDate.Format("2006-01-02"))
}
query = fmt.Sprintf(query, dateFilter, limit)
if err := db.Raw(query).Scan(&results).Error; err != nil {
return nil, err
}
return results, nil
}
// GetInstitutionStats gets statistics per institution using summary tables
func (r *AnalyticsRepository) GetInstitutionStats(ctx context.Context, startDate, endDate time.Time) ([]map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
var results []map[string]interface{}
// Use summary table for better performance
query := `
SELECT
i.id as institution_id,
i.name as institution_name,
i.type as institution_type,
COALESCE(SUM(ils.incoming_sent), 0) as incoming_count,
COALESCE(SUM(ils.outgoing_received), 0) as outgoing_count,
COALESCE(SUM(ils.total_correspondence), 0) as total_count,
MAX(ils.last_activity_at) as last_activity
FROM institutions i
LEFT JOIN institution_letter_summary ils ON ils.institution_id = i.id
WHERE 1=1
%s
GROUP BY i.id, i.name, i.type
HAVING COALESCE(SUM(ils.total_correspondence), 0) > 0
ORDER BY total_count DESC
`
dateFilter := ""
if !startDate.IsZero() {
dateFilter += fmt.Sprintf(" AND ils.summary_date >= '%s'", startDate.Format("2006-01-02"))
}
if !endDate.IsZero() {
dateFilter += fmt.Sprintf(" AND ils.summary_date <= '%s'", endDate.Format("2006-01-02"))
}
query = fmt.Sprintf(query, dateFilter)
if err := db.Raw(query).Scan(&results).Error; err != nil {
return nil, err
}
return results, nil
}
// GetApprovalMetrics gets approval-related metrics using summary tables
func (r *AnalyticsRepository) GetApprovalMetrics(ctx context.Context, startDate, endDate time.Time) (map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
metrics := make(map[string]interface{})
// Use summary table for better performance
query := db.Table("approval_sla_summary")
if !startDate.IsZero() {
query = query.Where("summary_date >= ?", startDate)
}
if !endDate.IsZero() {
query = query.Where("summary_date <= ?", endDate)
}
var result struct {
TotalApprovals int64 `gorm:"column:total_approvals"`
ApprovedCount int64 `gorm:"column:approved_count"`
RejectedCount int64 `gorm:"column:rejected_count"`
PendingCount int64 `gorm:"column:pending_count"`
AvgApprovalHours float64 `gorm:"column:avg_approval_hours"`
AvgApprovalSteps float64 `gorm:"column:avg_approval_steps"`
SLACompliance float64 `gorm:"column:sla_compliance"`
WithinSLA int64 `gorm:"column:within_sla"`
ExceededSLA int64 `gorm:"column:exceeded_sla"`
}
query.Select(`
COALESCE(SUM(total_approvals), 0) as total_approvals,
COALESCE(SUM(approved_count), 0) as approved_count,
COALESCE(SUM(rejected_count), 0) as rejected_count,
COALESCE(SUM(pending_count), 0) as pending_count,
COALESCE(AVG(avg_approval_hours), 0) as avg_approval_hours,
COALESCE(AVG(avg_approval_steps), 0) as avg_approval_steps,
COALESCE(AVG(sla_compliance_rate), 0) as sla_compliance,
COALESCE(SUM(within_sla_count), 0) as within_sla,
COALESCE(SUM(exceeded_sla_count), 0) as exceeded_sla
`).Scan(&result)
metrics["total_submitted"] = result.TotalApprovals
metrics["total_approved"] = result.ApprovedCount
metrics["total_rejected"] = result.RejectedCount
metrics["total_pending"] = result.PendingCount
metrics["avg_approval_time"] = result.AvgApprovalHours
metrics["avg_approval_steps"] = result.AvgApprovalSteps
metrics["sla_compliance_rate"] = result.SLACompliance
metrics["within_sla_count"] = result.WithinSLA
metrics["exceeded_sla_count"] = result.ExceededSLA
// Calculate rates
if result.TotalApprovals > 0 {
metrics["approval_rate"] = float64(result.ApprovedCount) / float64(result.TotalApprovals) * 100
metrics["rejection_rate"] = float64(result.RejectedCount) / float64(result.TotalApprovals) * 100
} else {
metrics["approval_rate"] = float64(0)
metrics["rejection_rate"] = float64(0)
}
return metrics, nil
}
// GetDailyActivity gets daily activity data
func (r *AnalyticsRepository) GetDailyActivity(ctx context.Context, days int) ([]map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
var results []map[string]interface{}
query := `
WITH daily_data AS (
SELECT
DATE(created_at) as date,
TO_CHAR(created_at, 'Day') as day_of_week,
COUNT(CASE WHEN type = 'incoming' THEN 1 END) as incoming_count,
COUNT(CASE WHEN type = 'outgoing' THEN 1 END) as outgoing_count,
0 as approved_count,
0 as rejected_count
FROM (
SELECT created_at, 'incoming' as type FROM letters_incoming WHERE deleted_at IS NULL
UNION ALL
SELECT created_at, 'outgoing' as type FROM letters_outgoing WHERE deleted_at IS NULL
) combined
WHERE created_at >= CURRENT_DATE - INTERVAL '%d days'
GROUP BY DATE(created_at), TO_CHAR(created_at, 'Day')
),
approval_data AS (
SELECT
DATE(acted_at) as date,
COUNT(CASE WHEN status = 'approved' THEN 1 END) as approved_count,
COUNT(CASE WHEN status = 'rejected' THEN 1 END) as rejected_count
FROM letter_outgoing_approvals
WHERE acted_at IS NOT NULL
AND acted_at >= CURRENT_DATE - INTERVAL '%d days'
GROUP BY DATE(acted_at)
)
SELECT
d.date,
d.day_of_week,
d.incoming_count,
d.outgoing_count,
COALESCE(a.approved_count, 0) as approved_count,
COALESCE(a.rejected_count, 0) as rejected_count
FROM daily_data d
LEFT JOIN approval_data a ON a.date = d.date
ORDER BY d.date DESC
LIMIT %d
`
query = fmt.Sprintf(query, days, days, days)
if err := db.Raw(query).Scan(&results).Error; err != nil {
return nil, err
}
return results, nil
}
// GetResponseTimeStats gets response time statistics
func (r *AnalyticsRepository) GetResponseTimeStats(ctx context.Context, startDate, endDate time.Time) (map[string]interface{}, error) {
db := DBFromContext(ctx, r.db)
stats := make(map[string]interface{})
query := `
WITH response_times AS (
SELECT
EXTRACT(EPOCH FROM (updated_at - created_at))/3600 as response_time_hours
FROM letters_outgoing
WHERE status IN ('approved', 'sent', 'archived')
AND deleted_at IS NULL
%s
)
SELECT
MIN(response_time_hours) as min_response_time,
MAX(response_time_hours) as max_response_time,
AVG(response_time_hours) as avg_response_time,
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY response_time_hours) as median_response_time,
PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY response_time_hours) as p95_response_time,
PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY response_time_hours) as p99_response_time
FROM response_times
`
dateFilter := ""
if !startDate.IsZero() {
dateFilter += fmt.Sprintf(" AND created_at >= '%s'", startDate.Format("2006-01-02"))
}
if !endDate.IsZero() {
dateFilter += fmt.Sprintf(" AND created_at <= '%s'", endDate.Format("2006-01-02"))
}
query = fmt.Sprintf(query, dateFilter)
if err := db.Raw(query).Scan(&stats).Error; err != nil {
return nil, err
}
return stats, nil
}