"""
|
|
Analytics & Predictive Insights views for Enterprise Incident Management API
|
|
Implements comprehensive analytics endpoints for KPIs, predictive insights, and dashboards
|
|
"""
from rest_framework import viewsets, status, permissions
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.pagination import PageNumberPagination
from django_filters.rest_framework import DjangoFilterBackend
from django_filters import rest_framework as filters
from django.db.models import Q, Avg, Sum, F
from django.utils import timezone
from decimal import Decimal

from ..models import (
    KPIMetric, KPIMeasurement, IncidentRecurrenceAnalysis, PredictiveModel,
    AnomalyDetection, CostImpactAnalysis, DashboardConfiguration,
    HeatmapData, PredictiveInsight
)
from ..serializers.analytics import (
    KPIMetricSerializer, KPIMeasurementSerializer, IncidentRecurrenceAnalysisSerializer,
    PredictiveModelSerializer, AnomalyDetectionSerializer, CostImpactAnalysisSerializer,
    DashboardConfigurationSerializer, HeatmapDataSerializer, PredictiveInsightSerializer,
    KPISummarySerializer, AnomalySummarySerializer, CostSummarySerializer,
    PredictiveInsightSummarySerializer, DashboardDataSerializer
)
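
# These viewsets are expected to be registered with a DRF router in the app's
# URL configuration. A minimal sketch (the prefixes are illustrative, not the
# project's actual routes):
#
#   from rest_framework.routers import DefaultRouter
#   router = DefaultRouter()
#   router.register(r'kpi-metrics', KPIMetricViewSet, basename='kpi-metric')
#   router.register(r'anomalies', AnomalyDetectionViewSet, basename='anomaly')
#   urlpatterns = router.urls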


class StandardResultsSetPagination(PageNumberPagination):
    """Standard pagination for analytics endpoints."""

    page_size = 20
    page_size_query_param = 'page_size'
    max_page_size = 100
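
    # Clients page through results with the standard query parameters, e.g.
    # GET /api/kpi-measurements/?page=2&page_size=50 (path illustrative);
    # requested page sizes are capped at max_page_size.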


class KPIMetricFilter(filters.FilterSet):
    """Filter set for KPI metrics."""

    metric_type = filters.ChoiceFilter(choices=KPIMetric.METRIC_TYPES)
    is_active = filters.BooleanFilter()
    is_system_metric = filters.BooleanFilter()
    created_after = filters.DateTimeFilter(field_name='created_at', lookup_expr='gte')
    created_before = filters.DateTimeFilter(field_name='created_at', lookup_expr='lte')

    class Meta:
        model = KPIMetric
        fields = ['metric_type', 'is_active', 'is_system_metric', 'created_after', 'created_before']
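
    # Example filtered request (the metric_type value is illustrative; valid
    # choices come from KPIMetric.METRIC_TYPES):
    #   GET /api/kpi-metrics/?metric_type=MTTR&is_active=true&created_after=2024-01-01T00:00:00Z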


class KPIMetricViewSet(viewsets.ModelViewSet):
    """ViewSet for KPI metrics management."""

    queryset = KPIMetric.objects.all()
    serializer_class = KPIMetricSerializer
    pagination_class = StandardResultsSetPagination
    filter_backends = [DjangoFilterBackend]
    filterset_class = KPIMetricFilter
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on user permissions."""
        queryset = super().get_queryset()

        # Non-staff users can only see active metrics
        if not self.request.user.is_staff:
            queryset = queryset.filter(is_active=True)

        return queryset.order_by('-created_at')

    @action(detail=True, methods=['get'])
    def measurements(self, request, pk=None):
        """Get measurements for a specific KPI metric."""
        metric = self.get_object()
        measurements = metric.measurements.all().order_by('-calculated_at')

        # Apply date filtering if provided
        start_date = request.query_params.get('start_date')
        end_date = request.query_params.get('end_date')

        if start_date:
            measurements = measurements.filter(measurement_period_start__gte=start_date)
        if end_date:
            measurements = measurements.filter(measurement_period_end__lte=end_date)

        # Paginate results
        paginator = StandardResultsSetPagination()
        page = paginator.paginate_queryset(measurements, request)

        if page is not None:
            serializer = KPIMeasurementSerializer(page, many=True)
            return paginator.get_paginated_response(serializer.data)

        serializer = KPIMeasurementSerializer(measurements, many=True)
        return Response(serializer.data)
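
    # Example (path and dates illustrative):
    #   GET /api/kpi-metrics/<id>/measurements/?start_date=2024-01-01&end_date=2024-01-31
    # The raw date strings are handed to the ORM, which coerces them when filtering.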

    @action(detail=False, methods=['get'])
    def summary(self, request):
        """Get a summary of all KPI metrics with their latest values and trends."""
        metrics = self.get_queryset()

        # Build one summary row per metric from its two most recent measurements.
        summaries = []
        for metric in metrics:
            recent = metric.measurements.order_by('-calculated_at')
            latest_measurement = recent.first()
            if latest_measurement:
                # Trend is a simple comparison against the previous measurement;
                # guard against division by zero when the previous value is 0.
                previous_measurement = recent[1:2].first()
                trend = 'stable'
                trend_percentage = Decimal('0.00')

                if previous_measurement and previous_measurement.value:
                    if latest_measurement.value > previous_measurement.value:
                        trend = 'up'
                        trend_percentage = ((latest_measurement.value - previous_measurement.value) / previous_measurement.value) * 100
                    elif latest_measurement.value < previous_measurement.value:
                        trend = 'down'
                        trend_percentage = ((previous_measurement.value - latest_measurement.value) / previous_measurement.value) * 100

                summaries.append({
                    'metric_type': metric.metric_type,
                    'metric_name': metric.name,
                    'current_value': latest_measurement.value,
                    'unit': latest_measurement.unit,
                    'trend': trend,
                    'trend_percentage': trend_percentage,
                    'period_start': latest_measurement.measurement_period_start,
                    'period_end': latest_measurement.measurement_period_end,
                    'incident_count': latest_measurement.incident_count,
                    'target_value': None,  # Targets are not stored on the metric model yet
                    'target_met': True  # Placeholder until targets are modelled
                })

        serializer = KPISummarySerializer(summaries, many=True)
        return Response(serializer.data)


class KPIMeasurementViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for KPI measurements (read-only)."""

    queryset = KPIMeasurement.objects.all()
    serializer_class = KPIMeasurementSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on query parameters."""
        queryset = super().get_queryset()

        # Filter by metric
        metric_id = self.request.query_params.get('metric_id')
        if metric_id:
            queryset = queryset.filter(metric_id=metric_id)

        # Filter by date range
        start_date = self.request.query_params.get('start_date')
        end_date = self.request.query_params.get('end_date')

        if start_date:
            queryset = queryset.filter(measurement_period_start__gte=start_date)
        if end_date:
            queryset = queryset.filter(measurement_period_end__lte=end_date)

        return queryset.order_by('-calculated_at')


class IncidentRecurrenceAnalysisViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for incident recurrence analysis."""

    queryset = IncidentRecurrenceAnalysis.objects.all()
    serializer_class = IncidentRecurrenceAnalysisSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on query parameters."""
        queryset = super().get_queryset()

        # Filter by recurrence type
        recurrence_type = self.request.query_params.get('recurrence_type')
        if recurrence_type:
            queryset = queryset.filter(recurrence_type=recurrence_type)

        # Filter by minimum confidence score
        min_confidence = self.request.query_params.get('min_confidence')
        if min_confidence:
            queryset = queryset.filter(confidence_score__gte=float(min_confidence))

        # Filter by resolution status
        is_resolved = self.request.query_params.get('is_resolved')
        if is_resolved is not None:
            queryset = queryset.filter(is_resolved=is_resolved.lower() == 'true')

        return queryset.order_by('-confidence_score', '-created_at')

    @action(detail=False, methods=['get'])
    def unresolved(self, request):
        """Get unresolved recurrence analyses."""
        queryset = self.get_queryset().filter(is_resolved=False)

        paginator = StandardResultsSetPagination()
        page = paginator.paginate_queryset(queryset, request)

        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return paginator.get_paginated_response(serializer.data)

        serializer = self.get_serializer(queryset, many=True)
        return Response(serializer.data)


class PredictiveModelViewSet(viewsets.ModelViewSet):
    """ViewSet for predictive model management."""

    queryset = PredictiveModel.objects.all()
    serializer_class = PredictiveModelSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on query parameters."""
        queryset = super().get_queryset()

        # Filter by model type
        model_type = self.request.query_params.get('model_type')
        if model_type:
            queryset = queryset.filter(model_type=model_type)

        # Filter by status
        status_filter = self.request.query_params.get('status')
        if status_filter:
            queryset = queryset.filter(status=status_filter)

        return queryset.order_by('-created_at')

    @action(detail=True, methods=['post'])
    def train(self, request, pk=None):
        """Trigger model training."""
        model = self.get_object()

        # Mark the model as training; the actual training run is expected to
        # happen asynchronously, outside the request/response cycle.
        model.status = 'TRAINING'
        model.save()
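        # A minimal sketch of the async hand-off, assuming a Celery task named
        # `train_predictive_model` exists elsewhere in the project (hypothetical
        # name, not part of this module):
        #
        #   from ..tasks import train_predictive_model
        #   train_predictive_model.delay(str(model.id))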

        return Response({
            'message': f'Training started for model {model.name}',
            'model_id': str(model.id),
            'status': model.status
        }, status=status.HTTP_202_ACCEPTED)

    @action(detail=True, methods=['get'])
    def performance(self, request, pk=None):
        """Get model performance metrics."""
        model = self.get_object()

        performance_data = {
            'accuracy': model.accuracy_score,
            'precision': model.precision_score,
            'recall': model.recall_score,
            'f1_score': model.f1_score,
            'training_samples': model.training_samples_count,
            'last_trained': model.last_trained_at,
            'training_duration': model.training_duration_seconds,
            'insight_count': model.insights.count(),
            'anomaly_detection_count': model.anomaly_detections.count()
        }

        return Response(performance_data)


class AnomalyDetectionViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for anomaly detection results."""

    queryset = AnomalyDetection.objects.all()
    serializer_class = AnomalyDetectionSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on query parameters."""
        queryset = super().get_queryset()

        # Filter by anomaly type
        anomaly_type = self.request.query_params.get('anomaly_type')
        if anomaly_type:
            queryset = queryset.filter(anomaly_type=anomaly_type)

        # Filter by severity
        severity = self.request.query_params.get('severity')
        if severity:
            queryset = queryset.filter(severity=severity)

        # Filter by status
        status_filter = self.request.query_params.get('status')
        if status_filter:
            queryset = queryset.filter(status=status_filter)

        # Filter by detection date range
        start_date = self.request.query_params.get('start_date')
        end_date = self.request.query_params.get('end_date')

        if start_date:
            queryset = queryset.filter(detected_at__gte=start_date)
        if end_date:
            queryset = queryset.filter(detected_at__lte=end_date)

        return queryset.order_by('-detected_at')

    @action(detail=False, methods=['get'])
    def summary(self, request):
        """Get an anomaly detection summary."""
        queryset = self.get_queryset()

        # Counts by severity and resolution state
        total_anomalies = queryset.count()
        critical_anomalies = queryset.filter(severity='CRITICAL').count()
        high_anomalies = queryset.filter(severity='HIGH').count()
        medium_anomalies = queryset.filter(severity='MEDIUM').count()
        low_anomalies = queryset.filter(severity='LOW').count()
        unresolved_anomalies = queryset.filter(status__in=['DETECTED', 'INVESTIGATING']).count()

        # False positive rate as a percentage of all detections
        false_positives = queryset.filter(status='FALSE_POSITIVE').count()
        false_positive_rate = (false_positives / total_anomalies * 100) if total_anomalies > 0 else 0

        # Average resolution time, computed in the database as the duration
        # between detection and resolution (requires F expressions; plain
        # string subtraction is invalid)
        resolved_anomalies = queryset.filter(status='RESOLVED', resolved_at__isnull=False)
        if resolved_anomalies.exists():
            avg_resolution_time = resolved_anomalies.aggregate(
                avg_time=Avg(F('resolved_at') - F('detected_at'))
            )['avg_time']
        else:
            avg_resolution_time = None

        summary_data = {
            'total_anomalies': total_anomalies,
            'critical_anomalies': critical_anomalies,
            'high_anomalies': high_anomalies,
            'medium_anomalies': medium_anomalies,
            'low_anomalies': low_anomalies,
            'unresolved_anomalies': unresolved_anomalies,
            'false_positive_rate': Decimal(str(false_positive_rate)),
            'average_resolution_time': avg_resolution_time
        }

        serializer = AnomalySummarySerializer(summary_data)
        return Response(serializer.data)

    @action(detail=True, methods=['post'])
    def acknowledge(self, request, pk=None):
        """Acknowledge an anomaly detection."""
        anomaly = self.get_object()

        if anomaly.status == 'DETECTED':
            anomaly.status = 'INVESTIGATING'
            anomaly.save()

            return Response({
                'message': 'Anomaly acknowledged and moved to investigating status',
                'anomaly_id': str(anomaly.id),
                'status': anomaly.status
            })

        return Response({
            'error': 'Anomaly is not in DETECTED status'
        }, status=status.HTTP_400_BAD_REQUEST)

    @action(detail=True, methods=['post'])
    def resolve(self, request, pk=None):
        """Resolve an anomaly detection."""
        anomaly = self.get_object()

        if anomaly.status in ['DETECTED', 'INVESTIGATING', 'CONFIRMED']:
            anomaly.status = 'RESOLVED'
            anomaly.resolved_at = timezone.now()
            anomaly.resolved_by = request.user
            anomaly.save()

            return Response({
                'message': 'Anomaly resolved',
                'anomaly_id': str(anomaly.id),
                'status': anomaly.status,
                'resolved_at': anomaly.resolved_at
            })

        return Response({
            'error': 'Anomaly cannot be resolved in its current status'
        }, status=status.HTTP_400_BAD_REQUEST)
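
    # Typical lifecycle calls against the two actions above (the URL prefix is
    # illustrative of a router-registered route, not the project's actual one):
    #   POST /api/anomalies/<id>/acknowledge/  -> DETECTED -> INVESTIGATING
    #   POST /api/anomalies/<id>/resolve/      -> ... -> RESOLVED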


class CostImpactAnalysisViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for cost impact analysis."""

    queryset = CostImpactAnalysis.objects.all()
    serializer_class = CostImpactAnalysisSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on query parameters."""
        queryset = super().get_queryset()

        # Filter by cost type
        cost_type = self.request.query_params.get('cost_type')
        if cost_type:
            queryset = queryset.filter(cost_type=cost_type)

        # Filter by validation status
        is_validated = self.request.query_params.get('is_validated')
        if is_validated is not None:
            queryset = queryset.filter(is_validated=is_validated.lower() == 'true')

        # Filter by creation date range
        start_date = self.request.query_params.get('start_date')
        end_date = self.request.query_params.get('end_date')

        if start_date:
            queryset = queryset.filter(created_at__gte=start_date)
        if end_date:
            queryset = queryset.filter(created_at__lte=end_date)

        return queryset.order_by('-created_at')

    @action(detail=False, methods=['get'])
    def summary(self, request):
        """Get a cost impact summary."""
        queryset = self.get_queryset()

        # Calculate summary statistics
        total_cost = queryset.aggregate(total=Sum('cost_amount'))['total'] or Decimal('0')
        downtime_cost = queryset.filter(cost_type='DOWNTIME').aggregate(total=Sum('cost_amount'))['total'] or Decimal('0')
        lost_revenue = queryset.filter(cost_type='LOST_REVENUE').aggregate(total=Sum('cost_amount'))['total'] or Decimal('0')
        penalty_cost = queryset.filter(cost_type='PENALTY').aggregate(total=Sum('cost_amount'))['total'] or Decimal('0')
        resource_cost = queryset.filter(cost_type='RESOURCE_COST').aggregate(total=Sum('cost_amount'))['total'] or Decimal('0')

        total_downtime_hours = queryset.aggregate(total=Sum('downtime_hours'))['total'] or Decimal('0')
        total_affected_users = queryset.aggregate(total=Sum('affected_users'))['total'] or 0

        # Calculate derived metrics, guarding against division by zero
        cost_per_hour = (total_cost / total_downtime_hours) if total_downtime_hours > 0 else Decimal('0')
        cost_per_user = (total_cost / total_affected_users) if total_affected_users > 0 else Decimal('0')

        summary_data = {
            'total_cost': total_cost,
            'currency': 'USD',
            'downtime_cost': downtime_cost,
            'lost_revenue': lost_revenue,
            'penalty_cost': penalty_cost,
            'resource_cost': resource_cost,
            'total_downtime_hours': total_downtime_hours,
            'total_affected_users': total_affected_users,
            'cost_per_hour': cost_per_hour,
            'cost_per_user': cost_per_user
        }

        serializer = CostSummarySerializer(summary_data)
        return Response(serializer.data)


class DashboardConfigurationViewSet(viewsets.ModelViewSet):
    """ViewSet for dashboard configurations."""

    queryset = DashboardConfiguration.objects.all()
    serializer_class = DashboardConfigurationSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on query parameters and user permissions."""
        queryset = super().get_queryset()

        # Filter by dashboard type
        dashboard_type = self.request.query_params.get('dashboard_type')
        if dashboard_type:
            queryset = queryset.filter(dashboard_type=dashboard_type)

        # Filter by active status
        is_active = self.request.query_params.get('is_active')
        if is_active is not None:
            queryset = queryset.filter(is_active=is_active.lower() == 'true')

        # Non-staff users only see public dashboards or ones they are allowed on
        if not self.request.user.is_staff:
            queryset = queryset.filter(
                Q(is_public=True) | Q(allowed_users=self.request.user)
            )

        return queryset.order_by('name')

    @action(detail=True, methods=['get'])
    def data(self, request, pk=None):
        """Get the aggregated data payload for a dashboard."""
        dashboard = self.get_object()

        # Check that the requesting user has access to this dashboard
        # (membership is tested in the database rather than in Python)
        if not dashboard.is_public and not dashboard.allowed_users.filter(pk=request.user.pk).exists():
            return Response({
                'error': 'Access denied to this dashboard'
            }, status=status.HTTP_403_FORBIDDEN)

        # KPI summary from the latest measurement of each active metric
        kpi_metrics = KPIMetric.objects.filter(is_active=True)
        kpi_summaries = []
        for metric in kpi_metrics:
            latest_measurement = metric.measurements.order_by('-calculated_at').first()
            if latest_measurement:
                kpi_summaries.append({
                    'metric_type': metric.metric_type,
                    'metric_name': metric.name,
                    'current_value': latest_measurement.value,
                    'unit': latest_measurement.unit,
                    'trend': 'stable',  # Simplified; see KPIMetricViewSet.summary for trend logic
                    'trend_percentage': Decimal('0.00'),
                    'period_start': latest_measurement.measurement_period_start,
                    'period_end': latest_measurement.measurement_period_end,
                    'incident_count': latest_measurement.incident_count,
                    'target_value': None,
                    'target_met': True
                })

        # Anomaly summary
        anomalies = AnomalyDetection.objects.all()
        anomaly_summary = {
            'total_anomalies': anomalies.count(),
            'critical_anomalies': anomalies.filter(severity='CRITICAL').count(),
            'high_anomalies': anomalies.filter(severity='HIGH').count(),
            'medium_anomalies': anomalies.filter(severity='MEDIUM').count(),
            'low_anomalies': anomalies.filter(severity='LOW').count(),
            'unresolved_anomalies': anomalies.filter(status__in=['DETECTED', 'INVESTIGATING']).count(),
            'false_positive_rate': Decimal('0.00'),  # Simplified
            'average_resolution_time': None
        }

        # Cost summary
        cost_analyses = CostImpactAnalysis.objects.all()
        cost_summary = {
            'total_cost': cost_analyses.aggregate(total=Sum('cost_amount'))['total'] or Decimal('0'),
            'currency': 'USD',
            'downtime_cost': cost_analyses.filter(cost_type='DOWNTIME').aggregate(total=Sum('cost_amount'))['total'] or Decimal('0'),
            'lost_revenue': cost_analyses.filter(cost_type='LOST_REVENUE').aggregate(total=Sum('cost_amount'))['total'] or Decimal('0'),
            'penalty_cost': cost_analyses.filter(cost_type='PENALTY').aggregate(total=Sum('cost_amount'))['total'] or Decimal('0'),
            'resource_cost': cost_analyses.filter(cost_type='RESOURCE_COST').aggregate(total=Sum('cost_amount'))['total'] or Decimal('0'),
            'total_downtime_hours': cost_analyses.aggregate(total=Sum('downtime_hours'))['total'] or Decimal('0'),
            'total_affected_users': cost_analyses.aggregate(total=Sum('affected_users'))['total'] or 0,
            'cost_per_hour': Decimal('0.00'),
            'cost_per_user': Decimal('0.00')
        }

        # Predictive insight summary
        insights = PredictiveInsight.objects.all()
        insight_summary = {
            'total_insights': insights.count(),
            'high_confidence_insights': insights.filter(confidence_level='HIGH').count(),
            'medium_confidence_insights': insights.filter(confidence_level='MEDIUM').count(),
            'low_confidence_insights': insights.filter(confidence_level='LOW').count(),
            'acknowledged_insights': insights.filter(is_acknowledged=True).count(),
            'validated_insights': insights.filter(is_validated=True).count(),
            'expired_insights': insights.filter(expires_at__lt=timezone.now()).count(),
            'average_accuracy': Decimal('0.00'),
            'active_models': PredictiveModel.objects.filter(status='ACTIVE').count()
        }

        # Most recent items for the dashboard widgets
        recent_anomalies = anomalies.order_by('-detected_at')[:5]
        recent_insights = insights.order_by('-generated_at')[:5]
        heatmap_data = HeatmapData.objects.all()[:3]

        dashboard_data = {
            'kpi_summary': kpi_summaries,
            'anomaly_summary': anomaly_summary,
            'cost_summary': cost_summary,
            'insight_summary': insight_summary,
            'recent_anomalies': AnomalyDetectionSerializer(recent_anomalies, many=True).data,
            'recent_insights': PredictiveInsightSerializer(recent_insights, many=True).data,
            'heatmap_data': HeatmapDataSerializer(heatmap_data, many=True).data,
            'last_updated': timezone.now()
        }

        serializer = DashboardDataSerializer(dashboard_data)
        return Response(serializer.data)
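
    # Assembling this payload issues a dozen or more queries per request. A
    # minimal caching sketch using Django's cache framework (the key and
    # timeout are illustrative):
    #
    #   from django.core.cache import cache
    #   payload = cache.get(f'dashboard-data:{dashboard.pk}')
    #   if payload is None:
    #       payload = DashboardDataSerializer(dashboard_data).data
    #       cache.set(f'dashboard-data:{dashboard.pk}', payload, timeout=60)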


class HeatmapDataViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for heatmap data."""

    queryset = HeatmapData.objects.all()
    serializer_class = HeatmapDataSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on query parameters."""
        queryset = super().get_queryset()

        # Filter by heatmap type
        heatmap_type = self.request.query_params.get('heatmap_type')
        if heatmap_type:
            queryset = queryset.filter(heatmap_type=heatmap_type)

        # Filter by time granularity
        time_granularity = self.request.query_params.get('time_granularity')
        if time_granularity:
            queryset = queryset.filter(time_granularity=time_granularity)

        return queryset.order_by('-created_at')


class PredictiveInsightViewSet(viewsets.ReadOnlyModelViewSet):
    """ViewSet for predictive insights."""

    queryset = PredictiveInsight.objects.all()
    serializer_class = PredictiveInsightSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [permissions.IsAuthenticated]

    def get_queryset(self):
        """Filter the queryset based on query parameters."""
        queryset = super().get_queryset()

        # Filter by insight type
        insight_type = self.request.query_params.get('insight_type')
        if insight_type:
            queryset = queryset.filter(insight_type=insight_type)

        # Filter by confidence level
        confidence_level = self.request.query_params.get('confidence_level')
        if confidence_level:
            queryset = queryset.filter(confidence_level=confidence_level)

        # Filter by acknowledgment status
        is_acknowledged = self.request.query_params.get('is_acknowledged')
        if is_acknowledged is not None:
            queryset = queryset.filter(is_acknowledged=is_acknowledged.lower() == 'true')

        # Filter by validation status
        is_validated = self.request.query_params.get('is_validated')
        if is_validated is not None:
            queryset = queryset.filter(is_validated=is_validated.lower() == 'true')

        # Exclude expired insights unless explicitly requested
        include_expired = self.request.query_params.get('include_expired', 'false')
        if include_expired.lower() != 'true':
            queryset = queryset.filter(expires_at__gt=timezone.now())

        return queryset.order_by('-generated_at')

    @action(detail=True, methods=['post'])
    def acknowledge(self, request, pk=None):
        """Acknowledge a predictive insight."""
        insight = self.get_object()

        if not insight.is_acknowledged:
            insight.is_acknowledged = True
            insight.acknowledged_by = request.user
            insight.acknowledged_at = timezone.now()
            insight.save()

            return Response({
                'message': 'Insight acknowledged',
                'insight_id': str(insight.id),
                'acknowledged_at': insight.acknowledged_at
            })

        return Response({
            'error': 'Insight is already acknowledged'
        }, status=status.HTTP_400_BAD_REQUEST)

    @action(detail=False, methods=['get'])
    def summary(self, request):
        """Get a predictive insight summary."""
        queryset = self.get_queryset()

        # Counts by confidence level and lifecycle state
        total_insights = queryset.count()
        high_confidence_insights = queryset.filter(confidence_level='HIGH').count()
        medium_confidence_insights = queryset.filter(confidence_level='MEDIUM').count()
        low_confidence_insights = queryset.filter(confidence_level='LOW').count()
        acknowledged_insights = queryset.filter(is_acknowledged=True).count()
        validated_insights = queryset.filter(is_validated=True).count()
        # Note: get_queryset() excludes expired insights by default, so this
        # count is only meaningful when ?include_expired=true is passed
        expired_insights = queryset.filter(expires_at__lt=timezone.now()).count()

        # Average accuracy across validated insights that recorded one
        validated_insights_with_accuracy = queryset.filter(
            is_validated=True,
            validation_accuracy__isnull=False
        )
        if validated_insights_with_accuracy.exists():
            avg_accuracy = validated_insights_with_accuracy.aggregate(
                avg=Avg('validation_accuracy')
            )['avg']
        else:
            avg_accuracy = None

        active_models = PredictiveModel.objects.filter(status='ACTIVE').count()

        summary_data = {
            'total_insights': total_insights,
            'high_confidence_insights': high_confidence_insights,
            'medium_confidence_insights': medium_confidence_insights,
            'low_confidence_insights': low_confidence_insights,
            'acknowledged_insights': acknowledged_insights,
            'validated_insights': validated_insights,
            'expired_insights': expired_insights,
            'average_accuracy': avg_accuracy,
            'active_models': active_models
        }

        serializer = PredictiveInsightSummarySerializer(summary_data)
        return Response(serializer.data)