"""
Admin configuration for incident intelligence
"""
from django.contrib import admin
from django.utils.html import format_html
from django.urls import reverse
from django.utils.safestring import mark_safe
from .models import (
Incident, IncidentClassification, SeveritySuggestion, IncidentCorrelation,
DuplicationDetection, IncidentPattern, AIProcessingLog
)
@admin.register(Incident)
class IncidentAdmin(admin.ModelAdmin):
    """Admin interface for Incident model"""
    list_display = [
        'title', 'severity', 'status', 'category', 'assigned_to', 'reporter',
        'created_at', 'ai_processed', 'is_duplicate', 'ai_analysis_link'
    ]
    list_filter = [
        'severity', 'status', 'category', 'ai_processed', 'is_duplicate',
        'created_at', 'assigned_to', 'reporter'
    ]
    search_fields = ['title', 'description', 'free_text']
    readonly_fields = [
        'id', 'created_at', 'updated_at', 'ai_processed', 'ai_processing_error',
        'last_ai_analysis', 'classification_confidence', 'severity_confidence',
        'duplicate_confidence'
    ]
    fieldsets = (
        ('Basic Information', {
            'fields': ('id', 'title', 'description', 'free_text')
        }),
        ('Classification', {
            'fields': ('category', 'subcategory', 'classification_confidence')
        }),
        ('Severity & Priority', {
            'fields': ('severity', 'suggested_severity', 'severity_confidence', 'priority')
        }),
        ('Status & Assignment', {
            'fields': ('status', 'assigned_to', 'reporter')
        }),
        ('Impact & Business Context', {
            'fields': ('affected_users', 'business_impact', 'estimated_downtime')
        }),
        ('AI Processing', {
            'fields': ('ai_processed', 'ai_processing_error', 'last_ai_analysis'),
            'classes': ('collapse',)
        }),
        ('Duplication Detection', {
            'fields': ('is_duplicate', 'original_incident', 'duplicate_confidence'),
            'classes': ('collapse',)
        }),
        ('Timestamps', {
            'fields': ('created_at', 'updated_at', 'resolved_at'),
            'classes': ('collapse',)
        })
    )

    def ai_analysis_link(self, obj):
        """Link to AI analysis details"""
        if obj.ai_processed:
            url = reverse('admin:incident_intelligence_incident_analysis', args=[obj.pk])
            return format_html('<a href="{}">View Analysis</a>', url)
        return "Not Processed"
    ai_analysis_link.short_description = "AI Analysis"
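    # Note: the reverse() call above assumes a custom admin view registered under the
    # URL name 'incident_intelligence_incident_analysis'; that route is not defined in
    # this module. A minimal sketch of how it could be wired up (the view method name
    # and path are assumptions for illustration, not part of this file):
    #
    #     def get_urls(self):
    #         from django.urls import path
    #         urls = super().get_urls()
    #         custom_urls = [
    #             path(
    #                 '<path:object_id>/analysis/',
    #                 self.admin_site.admin_view(self.analysis_view),
    #                 name='incident_intelligence_incident_analysis',
    #             ),
    #         ]
    #         return custom_urls + urls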
    actions = ['trigger_ai_analysis', 'mark_as_duplicate', 'mark_as_resolved']

    def trigger_ai_analysis(self, request, queryset):
        """Trigger AI analysis for selected incidents"""
        from .tasks import batch_process_incidents_ai
        incident_ids = [str(incident.id) for incident in queryset]
        batch_process_incidents_ai.delay(incident_ids)
        self.message_user(request, f"AI analysis triggered for {len(incident_ids)} incidents")
    trigger_ai_analysis.short_description = "Trigger AI Analysis"

    def mark_as_duplicate(self, request, queryset):
        """Mark selected incidents as duplicates"""
        count = queryset.update(is_duplicate=True)
        self.message_user(request, f"Marked {count} incidents as duplicates")
    mark_as_duplicate.short_description = "Mark as Duplicates"
    def mark_as_resolved(self, request, queryset):
        """Mark selected incidents as resolved"""
        # Bulk update() bypasses model save() and signals, so resolved_at is not set here.
        count = queryset.update(status='RESOLVED')
        self.message_user(request, f"Marked {count} incidents as resolved")
    mark_as_resolved.short_description = "Mark as Resolved"
@admin.register(IncidentClassification)
class IncidentClassificationAdmin(admin.ModelAdmin):
    """Admin interface for IncidentClassification model"""
    list_display = [
        'incident', 'predicted_category', 'predicted_subcategory',
        'confidence_score', 'model_version', 'created_at'
    ]
    list_filter = ['predicted_category', 'predicted_subcategory', 'model_version', 'created_at']
    search_fields = ['incident__title', 'predicted_category', 'predicted_subcategory']
    readonly_fields = ['created_at']
    fieldsets = (
        ('Classification Results', {
            'fields': ('incident', 'predicted_category', 'predicted_subcategory', 'confidence_score')
        }),
        ('Alternative Classifications', {
            'fields': ('alternative_categories',),
            'classes': ('collapse',)
        }),
        ('NLP Analysis', {
            'fields': ('extracted_keywords', 'sentiment_score', 'urgency_indicators'),
            'classes': ('collapse',)
        }),
        ('Processing Metadata', {
            'fields': ('model_version', 'processing_time', 'created_at'),
            'classes': ('collapse',)
        })
    )
@admin.register(SeveritySuggestion)
class SeveritySuggestionAdmin(admin.ModelAdmin):
    """Admin interface for SeveritySuggestion model"""
    list_display = [
        'incident', 'suggested_severity', 'confidence_score',
        'user_impact_score', 'business_impact_score', 'technical_impact_score',
        'created_at'
    ]
    list_filter = ['suggested_severity', 'model_version', 'created_at']
    search_fields = ['incident__title', 'reasoning']
    readonly_fields = ['created_at']
    fieldsets = (
        ('Severity Prediction', {
            'fields': ('incident', 'suggested_severity', 'confidence_score')
        }),
        ('Impact Analysis', {
            'fields': ('user_impact_score', 'business_impact_score', 'technical_impact_score')
        }),
        ('Reasoning', {
            'fields': ('reasoning', 'impact_factors'),
            'classes': ('collapse',)
        }),
        ('Processing Metadata', {
            'fields': ('model_version', 'processing_time', 'created_at'),
            'classes': ('collapse',)
        })
    )
@admin.register(IncidentCorrelation)
class IncidentCorrelationAdmin(admin.ModelAdmin):
    """Admin interface for IncidentCorrelation model"""
    list_display = [
        'primary_incident', 'related_incident', 'correlation_type',
        'correlation_strength', 'confidence_score', 'is_problem_indicator',
        'created_at'
    ]
    list_filter = [
        'correlation_type', 'correlation_strength', 'is_problem_indicator',
        'model_version', 'created_at'
    ]
    search_fields = ['primary_incident__title', 'related_incident__title']
    readonly_fields = ['created_at']
    fieldsets = (
        ('Correlation Details', {
            'fields': ('primary_incident', 'related_incident', 'correlation_type', 'correlation_strength')
        }),
        ('Analysis Results', {
            'fields': ('confidence_score', 'similarity_score', 'time_difference')
        }),
        ('Shared Elements', {
            'fields': ('shared_keywords',),
            'classes': ('collapse',)
        }),
        ('Problem Detection', {
            'fields': ('is_problem_indicator', 'problem_description'),
            'classes': ('collapse',)
        }),
        ('Metadata', {
            'fields': ('model_version', 'created_at'),
            'classes': ('collapse',)
        })
    )
@admin.register(DuplicationDetection)
class DuplicationDetectionAdmin(admin.ModelAdmin):
    """Admin interface for DuplicationDetection model"""
    list_display = [
        'incident_a', 'incident_b', 'duplication_type', 'confidence_score',
        'recommended_action', 'status', 'created_at'
    ]
    list_filter = [
        'duplication_type', 'recommended_action', 'status', 'model_version', 'created_at'
    ]
    search_fields = ['incident_a__title', 'incident_b__title']
    readonly_fields = ['created_at', 'reviewed_at']
    fieldsets = (
        ('Incident Pair', {
            'fields': ('incident_a', 'incident_b')
        }),
        ('Duplication Analysis', {
            'fields': ('duplication_type', 'confidence_score', 'similarity_score')
        }),
        ('Similarity Breakdown', {
            'fields': ('text_similarity', 'temporal_proximity', 'service_similarity'),
            'classes': ('collapse',)
        }),
        ('Recommendation', {
            'fields': ('recommended_action', 'merge_confidence', 'reasoning')
        }),
        ('Shared Elements', {
            'fields': ('shared_elements',),
            'classes': ('collapse',)
        }),
        ('Status & Review', {
            'fields': ('status', 'reviewed_at', 'reviewed_by'),
            'classes': ('collapse',)
        }),
        ('Metadata', {
            'fields': ('model_version', 'created_at'),
            'classes': ('collapse',)
        })
    )
    actions = ['approve_merge', 'reject_merge']

    def approve_merge(self, request, queryset):
        """Approve merging of selected duplications"""
        from .tasks import merge_duplicate_incidents
        approved = 0
        for duplication in queryset.filter(status='DETECTED'):
            duplication.status = 'REVIEWED'
            duplication.reviewed_by = request.user
            duplication.save()
            merge_duplicate_incidents.delay(duplication.id)
            approved += 1
        # Report only the duplications actually approved, not everything selected.
        self.message_user(request, f"Approved {approved} duplications for merging")
    approve_merge.short_description = "Approve Merge"
    def reject_merge(self, request, queryset):
        """Reject merging of selected duplications"""
        count = queryset.filter(status='DETECTED').update(
            status='REJECTED',
            reviewed_by=request.user
        )
        self.message_user(request, f"Rejected {count} duplications")
    reject_merge.short_description = "Reject Merge"
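# The admin actions above dispatch work asynchronously via .delay(), which suggests
# Celery tasks defined in incident_intelligence/tasks.py. Those tasks are not part of
# this module; a minimal sketch of the signatures this file assumes (not the actual
# implementation):
#
#     from celery import shared_task
#
#     @shared_task
#     def batch_process_incidents_ai(incident_ids):
#         """Run AI analysis for a list of incident IDs."""
#         ...
#
#     @shared_task
#     def merge_duplicate_incidents(duplication_id):
#         """Merge the incident pair recorded on a DuplicationDetection."""
#         ...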
@admin.register(IncidentPattern)
class IncidentPatternAdmin(admin.ModelAdmin):
    """Admin interface for IncidentPattern model"""
    list_display = [
        'name', 'pattern_type', 'incident_count', 'confidence_score',
        'is_active', 'is_resolved', 'created_at'
    ]
    list_filter = ['pattern_type', 'is_active', 'is_resolved', 'model_version', 'created_at']
    search_fields = ['name', 'description']
    readonly_fields = ['incident_count', 'created_at', 'updated_at']
    filter_horizontal = ['incidents']
    fieldsets = (
        ('Pattern Information', {
            'fields': ('name', 'pattern_type', 'description')
        }),
        ('Pattern Characteristics', {
            'fields': ('frequency', 'affected_services', 'common_keywords')
        }),
        ('Related Incidents', {
            'fields': ('incidents', 'incident_count')
        }),
        ('Analysis Results', {
            'fields': ('confidence_score', 'last_occurrence', 'next_predicted_occurrence')
        }),
        ('Status', {
            'fields': ('is_active', 'is_resolved')
        }),
        ('Metadata', {
            'fields': ('model_version', 'created_at', 'updated_at'),
            'classes': ('collapse',)
        })
    )

    actions = ['resolve_pattern', 'activate_pattern']

    def resolve_pattern(self, request, queryset):
        """Mark selected patterns as resolved"""
        count = queryset.update(is_resolved=True, is_active=False)
        self.message_user(request, f"Resolved {count} patterns")
    resolve_pattern.short_description = "Resolve Patterns"

    def activate_pattern(self, request, queryset):
        """Activate selected patterns"""
        count = queryset.update(is_active=True, is_resolved=False)
        self.message_user(request, f"Activated {count} patterns")
    activate_pattern.short_description = "Activate Patterns"
@admin.register(AIProcessingLog)
class AIProcessingLogAdmin(admin.ModelAdmin):
    """Admin interface for AIProcessingLog model"""
    list_display = [
        'processing_type', 'incident', 'status', 'processing_time',
        'confidence_score', 'started_at', 'completed_at'
    ]
    list_filter = [
        'processing_type', 'status', 'model_version', 'started_at'
    ]
    search_fields = ['incident__title', 'error_message']
    readonly_fields = ['started_at', 'completed_at']
    fieldsets = (
        ('Processing Details', {
            'fields': ('processing_type', 'status', 'incident', 'related_incidents')
        }),
        ('Input/Output Data', {
            'fields': ('input_data', 'output_data'),
            'classes': ('collapse',)
        }),
        ('Error Information', {
            'fields': ('error_message',),
            'classes': ('collapse',)
        }),
        ('Performance Metrics', {
            'fields': ('processing_time', 'confidence_score', 'model_version')
        }),
        ('Timestamps', {
            'fields': ('started_at', 'completed_at'),
            'classes': ('collapse',)
        })
    )

    def has_add_permission(self, request):
        """Disable adding new processing logs"""
        return False

    def has_change_permission(self, request, obj=None):
        """Disable editing processing logs"""
        return False
# Custom admin site configuration
admin.site.site_header = "ETB Incident Intelligence Admin"
admin.site.site_title = "Incident Intelligence"
admin.site.index_title = "Incident Intelligence Administration"