272 lines
11 KiB
Python
272 lines
11 KiB
Python
"""
|
|
Serializers for incident intelligence models
|
|
"""
|
|
from rest_framework import serializers
|
|
from django.contrib.auth import get_user_model
|
|
from ..models import (
|
|
Incident, IncidentClassification, SeveritySuggestion, IncidentCorrelation,
|
|
DuplicationDetection, IncidentPattern, AIProcessingLog
|
|
)
|
|
|
|
User = get_user_model()
|
|
|
|
|
|
class IncidentSerializer(serializers.ModelSerializer):
    """Full read/write serializer for Incident, with derived display fields."""

    # Human-readable names resolved from the related user records.
    reporter_name = serializers.CharField(source='reporter.get_full_name', read_only=True)
    assigned_to_name = serializers.CharField(source='assigned_to.get_full_name', read_only=True)
    # "2h 15m"-style rendering of the resolution duration.
    resolution_time_display = serializers.SerializerMethodField()
    is_resolved = serializers.BooleanField(read_only=True)

    class Meta:
        model = Incident
        fields = [
            'id', 'title', 'description', 'free_text', 'category', 'subcategory',
            'classification_confidence', 'severity', 'suggested_severity',
            'severity_confidence', 'priority', 'status', 'assigned_to', 'assigned_to_name',
            'reporter', 'reporter_name', 'created_at', 'updated_at', 'resolved_at',
            'affected_users', 'business_impact', 'estimated_downtime', 'ai_processed',
            'ai_processing_error', 'last_ai_analysis', 'is_duplicate', 'original_incident',
            'duplicate_confidence', 'resolution_time_display', 'is_resolved',
            'data_classification', 'security_clearance_required', 'is_sensitive'
        ]
        read_only_fields = ['id', 'created_at', 'updated_at', 'ai_processed', 'ai_processing_error']

    def get_resolution_time_display(self, obj):
        """Render ``obj.resolution_time`` as ``"Xh Ym"`` (or ``"Ym"`` when under
        an hour); returns None when there is no resolution time yet."""
        if not obj.resolution_time:
            return None
        hours, remainder = divmod(int(obj.resolution_time.total_seconds()), 3600)
        minutes = remainder // 60
        return f"{hours}h {minutes}m" if hours > 0 else f"{minutes}m"
|
|
|
|
|
class IncidentCreateSerializer(serializers.ModelSerializer):
    """Write serializer used when a brand-new incident is submitted."""

    class Meta:
        model = Incident
        fields = [
            'title', 'description', 'free_text', 'affected_users', 'business_impact',
            'estimated_downtime', 'reporter'
        ]

    def create(self, validated_data):
        """Persist the incident, then queue the asynchronous AI pipeline."""
        # Imported lazily to avoid a circular import at module load time.
        from ..tasks import process_incident_ai

        incident = super().create(validated_data)
        process_incident_ai.delay(incident.id)
        return incident
|
|
|
|
|
|
class IncidentUpdateSerializer(serializers.ModelSerializer):
    """Write serializer for editing an existing incident."""

    class Meta:
        model = Incident
        fields = [
            'title', 'description', 'category', 'subcategory', 'severity', 'priority',
            'status', 'assigned_to', 'affected_users', 'business_impact', 'estimated_downtime'
        ]

    def update(self, instance, validated_data):
        """Apply the update, re-queueing AI analysis when any field that feeds
        the AI models has been touched."""
        # Changing any of these invalidates the previous AI analysis.
        needs_reanalysis = not validated_data.keys().isdisjoint(
            ('title', 'description', 'category', 'subcategory', 'severity')
        )

        incident = super().update(instance, validated_data)

        if needs_reanalysis:
            # Imported lazily to avoid a circular import at module load time.
            from ..tasks import process_incident_ai
            process_incident_ai.delay(incident.id)

        return incident
|
|
|
|
|
|
class IncidentClassificationSerializer(serializers.ModelSerializer):
    """Read serializer exposing an AI classification result for an incident."""

    class Meta:
        model = IncidentClassification
        # Order here fixes the key order of the rendered representation.
        fields = (
            'id', 'incident', 'predicted_category', 'predicted_subcategory',
            'confidence_score', 'alternative_categories', 'extracted_keywords',
            'sentiment_score', 'urgency_indicators', 'model_version',
            'processing_time', 'created_at',
        )
        read_only_fields = ('id', 'created_at')
|
|
|
|
|
|
class SeveritySuggestionSerializer(serializers.ModelSerializer):
    """Read serializer exposing an AI severity suggestion for an incident."""

    class Meta:
        model = SeveritySuggestion
        # Order here fixes the key order of the rendered representation.
        fields = (
            'id', 'incident', 'suggested_severity', 'confidence_score',
            'user_impact_score', 'business_impact_score', 'technical_impact_score',
            'reasoning', 'impact_factors', 'model_version', 'processing_time', 'created_at',
        )
        read_only_fields = ('id', 'created_at')
|
|
|
|
|
|
class IncidentCorrelationSerializer(serializers.ModelSerializer):
    """Read serializer for a correlation detected between two incidents."""

    # Titles of both ends of the correlation, for display without extra lookups.
    primary_incident_title = serializers.CharField(source='primary_incident.title', read_only=True)
    related_incident_title = serializers.CharField(source='related_incident.title', read_only=True)

    class Meta:
        model = IncidentCorrelation
        # Order here fixes the key order of the rendered representation.
        fields = (
            'id', 'primary_incident', 'primary_incident_title', 'related_incident',
            'related_incident_title', 'correlation_type', 'confidence_score',
            'correlation_strength', 'shared_keywords', 'time_difference',
            'similarity_score', 'is_problem_indicator', 'problem_description',
            'model_version', 'created_at',
        )
        read_only_fields = ('id', 'created_at')
|
|
|
|
|
|
class DuplicationDetectionSerializer(serializers.ModelSerializer):
    """Read serializer for a detected potential duplicate incident pair."""

    # Display-only denormalizations of the related records.
    incident_a_title = serializers.CharField(source='incident_a.title', read_only=True)
    incident_b_title = serializers.CharField(source='incident_b.title', read_only=True)
    reviewed_by_name = serializers.CharField(source='reviewed_by.get_full_name', read_only=True)

    class Meta:
        model = DuplicationDetection
        # Order here fixes the key order of the rendered representation.
        fields = (
            'id', 'incident_a', 'incident_a_title', 'incident_b', 'incident_b_title',
            'duplication_type', 'similarity_score', 'confidence_score',
            'text_similarity', 'temporal_proximity', 'service_similarity',
            'recommended_action', 'merge_confidence', 'reasoning', 'shared_elements',
            'status', 'created_at', 'reviewed_at', 'reviewed_by', 'reviewed_by_name',
            'model_version',
        )
        read_only_fields = ('id', 'created_at', 'reviewed_at')
|
|
|
|
|
|
class IncidentPatternSerializer(serializers.ModelSerializer):
    """Read serializer for a recurring incident pattern."""

    # Count of member incidents; presumably annotated/derived on the
    # instance — supplied by the queryset or a model property, not computed here.
    incident_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = IncidentPattern
        # Order here fixes the key order of the rendered representation.
        fields = (
            'id', 'name', 'pattern_type', 'description', 'frequency',
            'affected_services', 'common_keywords', 'incidents', 'incident_count',
            'confidence_score', 'last_occurrence', 'next_predicted_occurrence',
            'is_active', 'is_resolved', 'created_at', 'updated_at', 'model_version',
        )
        read_only_fields = ('id', 'created_at', 'updated_at', 'incident_count')
|
|
|
|
|
|
class AIProcessingLogSerializer(serializers.ModelSerializer):
    """Read serializer for one AI pipeline processing-log entry."""

    # Display-only denormalization of the related incident's title.
    incident_title = serializers.CharField(source='incident.title', read_only=True)

    class Meta:
        model = AIProcessingLog
        # Order here fixes the key order of the rendered representation.
        fields = (
            'id', 'processing_type', 'status', 'incident', 'incident_title',
            'related_incidents', 'input_data', 'output_data', 'error_message',
            'processing_time', 'model_version', 'confidence_score',
            'started_at', 'completed_at',
        )
        read_only_fields = ('id', 'started_at', 'completed_at')
|
|
|
|
|
|
class IncidentAnalysisSerializer(serializers.Serializer):
    """Aggregated AI-analysis view of a single Incident.

    ``to_representation`` receives the Incident instance itself and assembles
    the nested payload (incident + classification + severity suggestion +
    correlations + duplications + patterns) from its related objects.
    The declared fields below document the output schema (e.g. for API
    schema generation); they are not used to resolve instance attributes.
    """

    incident = IncidentSerializer(read_only=True)
    classification = IncidentClassificationSerializer(read_only=True, allow_null=True)
    severity_suggestion = SeveritySuggestionSerializer(read_only=True, allow_null=True)
    correlations = IncidentCorrelationSerializer(many=True, read_only=True)
    duplications = DuplicationDetectionSerializer(many=True, read_only=True)
    patterns = IncidentPatternSerializer(many=True, read_only=True)

    def to_representation(self, instance):
        """Build the combined representation for *instance* (an Incident).

        FIX: the previous implementation first called
        ``super().to_representation(instance)``, which resolves each declared
        field by its own name on the instance — but the instance is the
        Incident itself (there is no ``instance.incident``), and the related
        attributes are named ``ai_classification`` /
        ``correlations_as_primary`` / ``duplication_as_a`` rather than
        ``classification`` / ``correlations`` / ``duplications``. Default
        resolution therefore failed before the manual overrides ran. The
        payload is now assembled explicitly.
        """
        data = {'incident': IncidentSerializer(instance).data}

        # Reverse one-to-ones raise <Model>.DoesNotExist when absent.
        try:
            data['classification'] = IncidentClassificationSerializer(instance.ai_classification).data
        except IncidentClassification.DoesNotExist:
            data['classification'] = None

        try:
            data['severity_suggestion'] = SeveritySuggestionSerializer(instance.severity_suggestion).data
        except SeveritySuggestion.DoesNotExist:
            data['severity_suggestion'] = None

        # Related collections are capped so the payload stays bounded.
        data['correlations'] = IncidentCorrelationSerializer(
            instance.correlations_as_primary.all()[:10], many=True
        ).data

        data['duplications'] = DuplicationDetectionSerializer(
            instance.duplication_as_a.all()[:10], many=True
        ).data

        data['patterns'] = IncidentPatternSerializer(
            instance.patterns.all()[:5], many=True
        ).data

        return data
|
|
|
|
|
|
class IncidentSearchSerializer(serializers.Serializer):
    """Validates query parameters for the incident search endpoint."""

    # Free-text and categorical filters (all optional).
    query = serializers.CharField(required=False, help_text="Search query")
    category = serializers.CharField(required=False, help_text="Filter by category")
    severity = serializers.ChoiceField(required=False, choices=Incident.SEVERITY_CHOICES)
    status = serializers.ChoiceField(required=False, choices=Incident.STATUS_CHOICES)

    # User filters (by primary key).
    assigned_to = serializers.IntegerField(required=False, help_text="Filter by assigned user ID")
    reporter = serializers.IntegerField(required=False, help_text="Filter by reporter user ID")

    # Creation-date window.
    date_from = serializers.DateTimeField(required=False, help_text="Filter incidents from date")
    date_to = serializers.DateTimeField(required=False, help_text="Filter incidents to date")

    # AI-related flags.
    has_ai_analysis = serializers.BooleanField(required=False, help_text="Filter by AI analysis status")
    is_duplicate = serializers.BooleanField(required=False, help_text="Filter by duplication status")

    # Pagination (page_size capped at 100).
    page = serializers.IntegerField(default=1, min_value=1)
    page_size = serializers.IntegerField(default=20, min_value=1, max_value=100)
|
|
|
|
|
|
class IncidentStatsSerializer(serializers.Serializer):
    """Shapes the aggregate incident-statistics payload (output only).

    Declaration order fixes the key order of the rendered representation.
    """

    # Headline counts.
    total_incidents = serializers.IntegerField()
    open_incidents = serializers.IntegerField()
    resolved_incidents = serializers.IntegerField()

    # Counts per severity level.
    critical_incidents = serializers.IntegerField()
    high_incidents = serializers.IntegerField()
    medium_incidents = serializers.IntegerField()
    low_incidents = serializers.IntegerField()

    average_resolution_time = serializers.DurationField()

    # Breakdown maps, e.g. {"network": 12, ...}.
    incidents_by_category = serializers.DictField()
    incidents_by_severity = serializers.DictField()
    incidents_by_status = serializers.DictField()

    # AI-pipeline counters.
    ai_processed_count = serializers.IntegerField()
    duplicate_count = serializers.IntegerField()
    correlation_count = serializers.IntegerField()
    pattern_count = serializers.IntegerField()
|