updates
@@ -0,0 +1,25 @@
from .handlers import EventHandler
from .types import (
    CloseResourcesEvent,
    CommandErrorEvent,
    CommandExecutedEvent,
    FirewallConfiguredEvent,
    FirewallDisabledEvent,
    FirewallHeartbeatEvent,
    FlushSecurityTracesEvent,
    PackageInstalledEvent,
    PackageUninstalledEvent,
)

__all__ = [
    "EventHandler",
    "CloseResourcesEvent",
    "FlushSecurityTracesEvent",
    "CommandExecutedEvent",
    "CommandErrorEvent",
    "PackageInstalledEvent",
    "PackageUninstalledEvent",
    "FirewallHeartbeatEvent",
    "FirewallConfiguredEvent",
    "FirewallDisabledEvent",
]
Binary file not shown.
@@ -0,0 +1,7 @@
from .bus import EventBus
from .utils import start_event_bus

__all__ = [
    "EventBus",
    "start_event_bus",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,348 @@
|
||||
"""
|
||||
Core EventBus implementation.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import queue
|
||||
import threading
|
||||
import time
|
||||
import logging
|
||||
from concurrent.futures import Future
|
||||
from typing import Dict, List, Any, Optional, Callable, TypeVar
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from ..handlers import EventHandler
|
||||
|
||||
from safety_schemas.models.events import Event, EventTypeBase, PayloadBase
|
||||
|
||||
|
||||
@dataclass
|
||||
class EventBusMetrics:
|
||||
"""
|
||||
Metrics for the event bus.
|
||||
"""
|
||||
|
||||
events_emitted: int = 0
|
||||
events_processed: int = 0
|
||||
events_failed: int = 0
|
||||
queue_high_water_mark: int = 0
|
||||
handler_durations: Dict[str, List[float]] = field(default_factory=dict)
|
||||
|
||||
|
||||
E = TypeVar("E", bound=Event)
|
||||
|
||||
# Define bounded type variables
|
||||
EventTypeT = TypeVar("EventTypeT", bound=EventTypeBase)
|
||||
PayloadT = TypeVar("PayloadT", bound=PayloadBase)
|
||||
|
||||
|
||||
class EventBus:
|
||||
"""
|
||||
Event bus that runs in a separate thread with its own asyncio event loop.
|
||||
|
||||
This class manages event subscription and dispatching across threads.
|
||||
|
||||
This approach leverages asyncio without migrating the existing
codebase to async.
|
||||
"""
|
||||
|
||||
def __init__(self, max_queue_size: int = 1000):
|
||||
"""
|
||||
Initialize the event bus.
|
||||
|
||||
Args:
|
||||
max_queue_size: Maximum number of events that can be queued
|
||||
"""
|
||||
self._handlers: Dict[EventTypeBase, List[EventHandler[Any]]] = {}
|
||||
|
||||
# Queue for passing events from main thread to event bus thread
|
||||
self._event_queue: queue.Queue = queue.Queue(maxsize=max_queue_size)
|
||||
|
||||
# Thread management
|
||||
self._running = False
|
||||
self._thread: Optional[threading.Thread] = None
|
||||
self._loop: Optional[asyncio.AbstractEventLoop] = None
|
||||
self._shutdown_event = threading.Event()
|
||||
|
||||
# Setup logging
|
||||
self.logger = logging.getLogger("event_bus")
|
||||
|
||||
# Metrics
|
||||
self.metrics = EventBusMetrics()
|
||||
|
||||
def subscribe(
|
||||
self, event_types: List[EventTypeBase], handler: EventHandler[E]
|
||||
) -> None:
|
||||
"""
|
||||
Subscribe a handler to one or more event types.
|
||||
|
||||
Args:
|
||||
event_types: The list of event types to subscribe to
|
||||
handler: The handler to register
|
||||
"""
|
||||
for event_type in event_types:
|
||||
if event_type not in self._handlers:
|
||||
self._handlers[event_type] = []
|
||||
|
||||
self.logger.info(
|
||||
f"Registering handler {handler.__class__.__name__} "
|
||||
f"for event type {event_type}"
|
||||
)
|
||||
self._handlers[event_type].append(handler)
|
||||
|
||||
def emit(
|
||||
self,
|
||||
event: Event[EventTypeT, PayloadT],
|
||||
block: bool = False,
|
||||
timeout: Optional[float] = None,
|
||||
) -> Optional[Future]:
|
||||
"""
|
||||
Emit an event to be processed by the event bus.
|
||||
|
||||
Args:
|
||||
event: The event to emit
|
||||
block: Whether to block if the queue is full
|
||||
timeout: How long to wait if blocking
|
||||
|
||||
Returns:
|
||||
Future that will contain the results, or None if the event couldn't be queued
|
||||
"""
|
||||
if not self._running:
|
||||
self.logger.warning("Event bus is not running, but an event was emitted")
|
||||
|
||||
self.metrics.events_emitted += 1
|
||||
|
||||
# Create a future to track the results
|
||||
future = Future()
|
||||
|
||||
try:
|
||||
# Track queue size for metrics
|
||||
current_size = self._event_queue.qsize()
|
||||
self.metrics.queue_high_water_mark = max(
|
||||
current_size, self.metrics.queue_high_water_mark
|
||||
)
|
||||
|
||||
# Put the event in the queue
|
||||
self._event_queue.put((event, future), block=block, timeout=timeout)
|
||||
self.logger.debug("Emitted %s (%s)", event.type, event.id)
|
||||
return future
|
||||
|
||||
except queue.Full:
|
||||
self.logger.error(f"Event queue is full, dropping event: {event}")
|
||||
future.set_exception(RuntimeError("Event queue is full"))
|
||||
return future
|
||||
|
||||
def start(self):
|
||||
if self._running:
|
||||
return
|
||||
|
||||
self._running = True
|
||||
self._shutdown_event.clear()
|
||||
self._thread = threading.Thread(target=self._run_event_loop, daemon=True)
|
||||
self._thread.start()
|
||||
|
||||
def stop(self, timeout=5.0):
|
||||
if not self._running:
|
||||
return True
|
||||
|
||||
self._running = False
|
||||
self._event_queue.put((None, None), block=False) # Send sentinel
|
||||
return self._shutdown_event.wait(timeout)
|
||||
|
||||
def _run_event_loop(self):
|
||||
self._loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self._loop)
|
||||
|
||||
async def main():
|
||||
pending_tasks = set()
|
||||
|
||||
# Process the queue until shutdown
|
||||
while self._running or not self._event_queue.empty():
|
||||
try:
|
||||
# Get the next event with a short timeout
|
||||
try:
|
||||
event, future = self._event_queue.get(timeout=0.1)
|
||||
except queue.Empty:
|
||||
await asyncio.sleep(0.01)
|
||||
continue
|
||||
|
||||
# Check for shutdown sentinel
|
||||
if event is None:
|
||||
self.logger.info("Received shutdown sentinel")
|
||||
break
|
||||
|
||||
# Process the event
|
||||
task = asyncio.create_task(self._dispatch_event(event, future))
|
||||
self.logger.debug(f"Dispatching {event.type} ({event.id})")
|
||||
pending_tasks.add(task)
|
||||
task.add_done_callback(lambda t: pending_tasks.discard(t))
|
||||
except Exception as e:
|
||||
self.logger.exception(f"Error processing event: {e}")
|
||||
|
||||
# Wait for any pending tasks before exiting
|
||||
if pending_tasks:
|
||||
self.logger.info(f"Waiting for {len(pending_tasks)} pending tasks")
|
||||
await asyncio.gather(*pending_tasks, return_exceptions=True)
|
||||
|
||||
try:
|
||||
# Single run_until_complete call for the entire lifecycle
|
||||
self._loop.run_until_complete(main())
|
||||
finally:
|
||||
self._loop.close()
|
||||
self._shutdown_event.set()
|
||||
|
||||
async def _dispatch_event(
|
||||
self, event: Event[EventTypeBase, PayloadBase], future: Future
|
||||
) -> None:
|
||||
"""
|
||||
Dispatch an event to all registered handlers.
|
||||
|
||||
Args:
|
||||
event: The event to dispatch
|
||||
future: Future to set with the results
|
||||
"""
|
||||
results = []
|
||||
|
||||
handlers = self._handlers.get(event.type, [])
|
||||
|
||||
if not handlers:
|
||||
self.logger.warning(f"No handlers registered for event type {event.type}")
|
||||
future.set_result([])
|
||||
return
|
||||
|
||||
# Create tasks for all handlers and run them concurrently
|
||||
tasks = []
|
||||
for handler in handlers:
|
||||
task = asyncio.create_task(self._handle_event(handler, event))
|
||||
tasks.append(task)
|
||||
|
||||
trace_id = event.correlation_id if event.correlation_id else "-"
|
||||
|
||||
self.logger.debug(
|
||||
"Event %s | %s | %s Handler(s) Task(s)", trace_id, event.type, len(tasks)
|
||||
)
|
||||
|
||||
# Wait for all handlers to complete
|
||||
handler_results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
self.logger.info(
|
||||
"Event %s | %s | %s Handler(s) Completed",
|
||||
trace_id,
|
||||
event.type,
|
||||
len(handler_results),
|
||||
)
|
||||
|
||||
# Process results
|
||||
for i, result in enumerate(handler_results):
|
||||
if isinstance(result, Exception):
|
||||
self.logger.error(
|
||||
"Event %s | %s | Handler %d failed: %s",
|
||||
trace_id,
|
||||
event.type,
|
||||
i,
|
||||
str(result),
|
||||
exc_info=result,
|
||||
)
|
||||
else:
|
||||
self.logger.debug(
|
||||
"Event %s | %s | Handler %d succeeded: %s",
|
||||
trace_id,
|
||||
event.type,
|
||||
i,
|
||||
str(result),
|
||||
)
|
||||
results.append(result)
|
||||
|
||||
# Set the result on the future
|
||||
if not future.done():
|
||||
future.set_result(results)
|
||||
|
||||
async def _handle_event(self, handler: EventHandler[E], event: E) -> Any:
|
||||
"""
|
||||
Handle a single event with error handling and metrics.
|
||||
|
||||
Args:
|
||||
handler: The handler to use
|
||||
event: The event to handle
|
||||
|
||||
Returns:
|
||||
The result from the handler
|
||||
"""
|
||||
handler_name = handler.__class__.__name__
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
# Call the handler
|
||||
result = await handler.handle(event)
|
||||
|
||||
# Record successful processing
|
||||
self.metrics.events_processed += 1
|
||||
|
||||
# Record timing
|
||||
duration = time.time() - start_time
|
||||
if handler_name not in self.metrics.handler_durations:
|
||||
self.metrics.handler_durations[handler_name] = []
|
||||
self.metrics.handler_durations[handler_name].append(duration)
|
||||
|
||||
self.logger.debug(
|
||||
f"Handler {handler_name} processed {event.__class__.__name__} "
|
||||
f"in {duration:.3f}s"
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
# Record failure
|
||||
self.metrics.events_failed += 1
|
||||
|
||||
self.logger.exception(
|
||||
f"Handler {handler_name} failed to process {event.__class__.__name__}: {e}"
|
||||
)
|
||||
raise
|
||||
|
||||
def get_metrics(self) -> dict:
|
||||
"""
|
||||
Get the current metrics for the event bus.
|
||||
|
||||
Returns:
|
||||
Dictionary of metrics
|
||||
"""
|
||||
metrics: dict[str, Any] = {
|
||||
"events_emitted": self.metrics.events_emitted,
|
||||
"events_processed": self.metrics.events_processed,
|
||||
"events_failed": self.metrics.events_failed,
|
||||
"current_queue_size": self._event_queue.qsize(),
|
||||
"queue_high_water_mark": self.metrics.queue_high_water_mark,
|
||||
}
|
||||
|
||||
# Add handler metrics
|
||||
handler_metrics = {}
|
||||
for handler_name, durations in self.metrics.handler_durations.items():
|
||||
if not durations:
|
||||
continue
|
||||
|
||||
handler_metrics[handler_name] = {
|
||||
"count": len(durations),
|
||||
"avg_duration": sum(durations) / len(durations),
|
||||
"max_duration": max(durations),
|
||||
"min_duration": min(durations),
|
||||
}
|
||||
|
||||
metrics["handlers"] = handler_metrics
|
||||
return metrics
|
||||
|
||||
def emit_with_callback(
|
||||
self, event: Event, callback: Callable[[List[Any]], None]
|
||||
) -> None:
|
||||
"""
|
||||
Emit an event and register a callback for when it completes.
|
||||
|
||||
Args:
|
||||
event: The event to emit
|
||||
callback: Function to call with the results when complete
|
||||
"""
|
||||
future = self.emit(event)
|
||||
if future:
|
||||
future.add_done_callback(
|
||||
lambda f: callback(f.result()) if not f.exception() else None
|
||||
)
|
||||
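A minimal usage sketch for the EventBus above (not part of the commit): a synchronous caller subscribes a handler, emits an internal event, and blocks briefly on the returned Future. PrintingHandler is a hypothetical handler, and the imports assume the package layout introduced in this commit.

from safety.events.event_bus import EventBus
from safety.events.handlers import EventHandler
from safety.events.types import InternalEventType, InternalPayload
from safety.events.utils import create_internal_event


class PrintingHandler(EventHandler):
    """Illustrative handler that just echoes the event type."""

    async def handle(self, event):
        # Runs inside the bus thread's asyncio loop.
        print(f"handled {event.type}")
        return {"ok": True}


bus = EventBus(max_queue_size=100)
bus.start()
bus.subscribe([InternalEventType.EVENT_BUS_READY], PrintingHandler())

event = create_internal_event(
    event_type=InternalEventType.EVENT_BUS_READY, payload=InternalPayload()
)

# emit() hands the event to the bus thread and returns a concurrent.futures.Future
# holding the handler results; synchronous code may wait on it with a short timeout.
future = bus.emit(event)
if future:
    print(future.result(timeout=1.0))

bus.stop(timeout=2.0)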
@@ -0,0 +1,57 @@
from typing import TYPE_CHECKING
from .bus import EventBus

from safety_schemas.models.events import EventType
from safety.events.types import InternalEventType
from safety.events.handlers import SecurityEventsHandler

from safety.constants import PLATFORM_API_EVENTS_ENDPOINT

if TYPE_CHECKING:
    from safety.models import SafetyCLI
    from safety.auth.utils import SafetyAuthSession


def start_event_bus(obj: "SafetyCLI", session: "SafetyAuthSession"):
    """
    Initializes the event bus with the default security events handler
    for authenticated users.
    This function creates an instance of the EventBus, starts it,
    and assigns it to the `event_bus` attribute of the provided `obj`.
    It also initializes the `security_events_handler` with the necessary
    parameters and subscribes it to a predefined list of events.

    Args:
        obj (SafetyCLI): The main application object.
        session (SafetyAuthSession): The authentication session containing
            the necessary credentials and proxies.
    """
    event_bus = EventBus()
    event_bus.start()
    obj.event_bus = event_bus

    token = session.token.get("access_token") if session.token else None

    obj.security_events_handler = SecurityEventsHandler(
        api_endpoint=PLATFORM_API_EVENTS_ENDPOINT,
        proxies=session.proxies,  # type: ignore
        auth_token=token,
        api_key=session.api_key,
    )

    events = [
        EventType.AUTH_STARTED,
        EventType.AUTH_COMPLETED,
        EventType.COMMAND_EXECUTED,
        EventType.COMMAND_ERROR,
        InternalEventType.CLOSE_RESOURCES,
        InternalEventType.FLUSH_SECURITY_TRACES,
    ]

    event_bus.subscribe(events, obj.security_events_handler)

    if obj.firewall_enabled:
        from safety.firewall.events.utils import register_event_handlers

        register_event_handlers(obj.event_bus, obj=obj)
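A hedged sketch of a possible shutdown sequence for the wiring created by start_event_bus(). shutdown_event_bus is a hypothetical helper, not defined in this commit: it flushes collected traces, asks the handler to release its HTTP client, then stops the bus thread.

from safety.events.types import InternalEventType, InternalPayload
from safety.events.utils import create_internal_event


def shutdown_event_bus(obj) -> None:
    """Best-effort flush and teardown; obj is the SafetyCLI object."""
    if not getattr(obj, "event_bus", None):
        return

    for event_type in (
        InternalEventType.FLUSH_SECURITY_TRACES,
        InternalEventType.CLOSE_RESOURCES,
    ):
        event = create_internal_event(
            event_type=event_type, payload=InternalPayload()
        )
        future = obj.event_bus.emit(event)
        if future:
            try:
                future.result(timeout=1.0)
            except Exception:
                pass  # shutdown should not fail the CLI

    obj.event_bus.stop(timeout=2.0)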
@@ -0,0 +1,5 @@
from .base import EventHandler
from .common import SecurityEventsHandler


__all__ = ["EventHandler", "SecurityEventsHandler"]
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,32 @@
"""
Event handler definitions for the event bus system.
"""

from abc import ABC, abstractmethod
from typing import Any, TypeVar, Generic

from safety_schemas.models.events import Event

# Type variable for event types
EventType = TypeVar("EventType", bound=Event)


class EventHandler(Generic[EventType], ABC):
    """
    Abstract base class for event handlers.

    Concrete handlers should implement the handle method.
    """

    @abstractmethod
    async def handle(self, event: EventType) -> Any:
        """
        Handle an event asynchronously.

        Args:
            event: The event to handle

        Returns:
            Any result from handling the event
        """
        pass
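As an illustration of subclassing the abstract base above, here is a minimal typed handler. CommandAuditHandler is a hypothetical name and is not part of this commit; it assumes CommandExecutedEvent payloads carry duration_ms, as constructed in the emission module later in this commit.

from typing import Any, List

from safety.events.handlers import EventHandler
from safety.events.types import CommandExecutedEvent


class CommandAuditHandler(EventHandler[CommandExecutedEvent]):
    """Collects the duration of executed commands in memory."""

    def __init__(self) -> None:
        self.durations_ms: List[int] = []

    async def handle(self, event: CommandExecutedEvent) -> Any:
        # CommandExecutedPayload includes duration_ms (see the emission module).
        self.durations_ms.append(event.payload.duration_ms)
        return {"recorded": True}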
@@ -0,0 +1,333 @@
|
||||
import asyncio
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
|
||||
import uuid
|
||||
|
||||
import httpx
|
||||
from tenacity import (
|
||||
before_sleep_log,
|
||||
retry,
|
||||
retry_if_exception_type,
|
||||
stop_after_attempt,
|
||||
wait_exponential,
|
||||
)
|
||||
import tenacity
|
||||
|
||||
from safety.meta import get_identifier, get_meta_http_headers, get_version
|
||||
|
||||
from ..types import (
|
||||
CommandErrorEvent,
|
||||
CommandExecutedEvent,
|
||||
CloseResourcesEvent,
|
||||
InternalEventType,
|
||||
FlushSecurityTracesEvent,
|
||||
)
|
||||
from ..handlers import EventHandler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from safety_schemas.models.events import EventContext
|
||||
from safety.events.utils import InternalPayload
|
||||
from safety.models import SafetyCLI
|
||||
|
||||
SecurityEventTypes = Union[
|
||||
CommandExecutedEvent,
|
||||
CommandErrorEvent,
|
||||
FlushSecurityTracesEvent,
|
||||
CloseResourcesEvent,
|
||||
]
|
||||
|
||||
|
||||
class SecurityEventsHandler(EventHandler[SecurityEventTypes]):
|
||||
"""
|
||||
Handler that collects events in memory and flushes them when requested.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
api_endpoint: str,
|
||||
proxies: Optional[Dict[str, str]] = None,
|
||||
auth_token: Optional[str] = None,
|
||||
api_key: Optional[str] = None,
|
||||
):
|
||||
"""
|
||||
Initialize the telemetry handler.
|
||||
|
||||
Args:
|
||||
api_endpoint: URL to send events to
|
||||
proxies: Optional dictionary of proxy settings
|
||||
auth_token: Optional authentication token for the API
|
||||
api_key: Optional API key for authentication
|
||||
"""
|
||||
self.api_endpoint = api_endpoint
|
||||
self.proxies = proxies
|
||||
self.auth_token = auth_token
|
||||
self.api_key = api_key
|
||||
|
||||
# Storage for collected events
|
||||
self.collected_events: List[Dict[str, Any]] = []
|
||||
|
||||
# HTTP client (created when needed)
|
||||
self.http_client = None
|
||||
|
||||
# Logging
|
||||
self.logger = logging.getLogger("security_events_handler")
|
||||
|
||||
# Event types that should not be collected (to avoid recursion)
|
||||
self.excluded_event_types = [
|
||||
InternalEventType.FLUSH_SECURITY_TRACES,
|
||||
]
|
||||
|
||||
async def handle(self, event: SecurityEventTypes) -> Dict[str, Any]:
|
||||
"""
|
||||
Handle an event - either collect it or process a flush request.
|
||||
|
||||
Args:
|
||||
event: The event to handle
|
||||
|
||||
Returns:
|
||||
Status dictionary
|
||||
"""
|
||||
|
||||
if event.type is InternalEventType.CLOSE_RESOURCES:
|
||||
self.logger.info("Received request to close resources")
|
||||
await self.close_async()
|
||||
return {"closed": True}
|
||||
|
||||
if event.type is InternalEventType.FLUSH_SECURITY_TRACES:
|
||||
self.logger.info(f"Received flush request from {event.source}")
|
||||
return await self.flush(event_payload=event.payload)
|
||||
|
||||
# Don't collect excluded event types
|
||||
if any(event.type == t for t in self.excluded_event_types):
|
||||
return {"skipped": True, "reason": "excluded_event_type"}
|
||||
|
||||
try:
|
||||
event_data = event.model_dump(mode="json")
|
||||
except Exception:
|
||||
return {"collected": False, "event_count": len(self.collected_events)}
|
||||
|
||||
# Add to in-memory collection
|
||||
self.collected_events.append(event_data)
|
||||
event_count = len(self.collected_events)
|
||||
|
||||
self.logger.debug(
|
||||
f"Collected event: {event.type}, total event count: {event_count}"
|
||||
)
|
||||
|
||||
return {"collected": True, "event_count": event_count}
|
||||
|
||||
async def _build_context_data(self, obj: Optional["SafetyCLI"]) -> "EventContext":
|
||||
"""
|
||||
Generate context data for telemetry events.
|
||||
|
||||
Returns:
|
||||
EventContext containing information about the client, runtime, etc.
|
||||
"""
|
||||
from safety_schemas.models.events.types import SourceType
|
||||
from safety.events.utils.context import create_event_context
|
||||
|
||||
project = getattr(obj, "project", None)
|
||||
tags = None
|
||||
try:
|
||||
if obj and obj.auth and obj.auth.stage:
|
||||
tags = [obj.auth.stage.value]
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
version = get_version() or "unknown"
|
||||
|
||||
path = ""
|
||||
try:
|
||||
path = sys.argv[0]
|
||||
except (IndexError, TypeError):
|
||||
pass
|
||||
|
||||
context = await asyncio.get_event_loop().run_in_executor(
|
||||
None,
|
||||
functools.partial(
|
||||
create_event_context,
|
||||
SourceType(get_identifier()),
|
||||
version,
|
||||
path,
|
||||
project,
|
||||
tags,
|
||||
),
|
||||
)
|
||||
|
||||
return context
|
||||
|
||||
@retry(
|
||||
stop=stop_after_attempt(3),
|
||||
wait=wait_exponential(multiplier=0.1, min=0.2, max=1.0),
|
||||
retry=retry_if_exception_type(
|
||||
(httpx.NetworkError, httpx.TimeoutException, httpx.HTTPStatusError)
|
||||
),
|
||||
before_sleep=before_sleep_log(logging.getLogger("api_client"), logging.WARNING),
|
||||
)
|
||||
async def _send_events(
|
||||
self, payload: dict, headers: dict
|
||||
) -> Optional[httpx.Response]:
|
||||
"""
|
||||
Send events to the API with retry logic.
|
||||
|
||||
Args:
|
||||
payload: The data payload to send
|
||||
headers: The HTTP headers to include
|
||||
|
||||
Returns:
|
||||
Response from the API or None if http_client is not initialized
|
||||
|
||||
Raises:
|
||||
Exception if all retries fail
|
||||
"""
|
||||
if self.http_client is None:
|
||||
self.logger.warning("Cannot send events: HTTP client not initialized")
|
||||
return None
|
||||
|
||||
TIMEOUT = int(os.getenv("SAFETY_REQUEST_TIMEOUT_EVENTS", 10))
|
||||
|
||||
response = await self.http_client.post(
|
||||
self.api_endpoint, json=payload, headers=headers, timeout=TIMEOUT
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response
|
||||
|
||||
async def flush(self, event_payload: "InternalPayload") -> Dict[str, Any]:
|
||||
"""
|
||||
Send all collected events to the API endpoint.
|
||||
|
||||
Returns:
|
||||
Status dictionary
|
||||
"""
|
||||
# If no events, just return early
|
||||
if not self.collected_events:
|
||||
return {"status": "no_events", "count": 0}
|
||||
|
||||
# Get a copy of events and clear the original list
|
||||
events_to_send = self.collected_events.copy()
|
||||
self.collected_events.clear()
|
||||
|
||||
event_count = len(events_to_send)
|
||||
self.logger.info(
|
||||
"[Flush] -> Sending %s events to %s", event_count, self.api_endpoint
|
||||
)
|
||||
IDEMPOTENCY_KEY = str(uuid.uuid4())
|
||||
|
||||
# Get context data that will be shared across all events
|
||||
obj = event_payload.ctx.obj if event_payload.ctx else None
|
||||
context = await self._build_context_data(obj=obj)
|
||||
self.logger.info("Context data built")
|
||||
|
||||
for event_data in events_to_send:
|
||||
event_data["context"] = context.model_dump(mode="json")
|
||||
|
||||
payload = {"events": events_to_send}
|
||||
|
||||
# Create HTTP client if needed
|
||||
if self.http_client is None:
|
||||
# TODO: Add proxy support
|
||||
self.http_client = httpx.AsyncClient(proxy=None)
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"X-Idempotency-Key": IDEMPOTENCY_KEY,
|
||||
}
|
||||
headers.update(get_meta_http_headers())
|
||||
|
||||
# Add authentication
|
||||
if self.api_key:
|
||||
headers["X-Api-Key"] = self.api_key
|
||||
elif self.auth_token:
|
||||
headers["Authorization"] = f"Bearer {self.auth_token}"
|
||||
|
||||
try:
|
||||
# Send the request with retries
|
||||
response = await self._send_events(payload, headers)
|
||||
|
||||
# Handle case where http_client was None and _send_events returned None
|
||||
if response is None:
|
||||
self.logger.warning("Events not sent: HTTP client not initialized")
|
||||
# Put events back in collection
|
||||
self.collected_events = events_to_send + self.collected_events
|
||||
return {
|
||||
"status": "error",
|
||||
"count": event_count,
|
||||
"error": "HTTP client not initialized",
|
||||
}
|
||||
|
||||
self.logger.info(
|
||||
f"Successfully sent {event_count} events, status: {response.status_code}"
|
||||
)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"count": event_count,
|
||||
"http_status": response.status_code,
|
||||
}
|
||||
except tenacity.RetryError as retry_exc:
|
||||
# Put events back in collection
|
||||
self.collected_events = events_to_send + self.collected_events
|
||||
exc = retry_exc.last_attempt.exception()
|
||||
|
||||
status_code = None
|
||||
if hasattr(exc, "response"):
|
||||
status_code = exc.response.status_code  # type: ignore
|
||||
|
||||
self.logger.error(f"Failed after retries: {exc}")
|
||||
|
||||
result = {"status": "error", "count": event_count, "error": repr(exc)}
|
||||
if status_code:
|
||||
result["http_status"] = status_code
|
||||
|
||||
return result
|
||||
except Exception as exc:
|
||||
# Handle any other unexpected exceptions
|
||||
self.collected_events = events_to_send + self.collected_events
|
||||
self.logger.exception(f"Unexpected error: {exc}")
|
||||
|
||||
return {"status": "error", "count": event_count, "error": repr(exc)}
|
||||
|
||||
async def close_async(self):
|
||||
"""Close the HTTP client asynchronously."""
|
||||
if self.http_client:
|
||||
await self.http_client.aclose()
|
||||
self.http_client = None
|
||||
self.logger.debug("HTTP client closed")
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Synchronous method to close the handler.
|
||||
|
||||
This is a non-blocking method that initiates closure but doesn't wait for it.
|
||||
The event bus will handle the actual closing asynchronously.
|
||||
"""
|
||||
self.logger.info("Initiating telemetry handler shutdown")
|
||||
# The actual close will happen when the event loop processes events
|
||||
# Just log the intent and let the event loop handle it
|
||||
return {"status": "shutdown_initiated"}
|
||||
|
||||
def get_stats(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Get current telemetry statistics.
|
||||
|
||||
Returns:
|
||||
Dictionary of statistics
|
||||
"""
|
||||
event_count = len(self.collected_events)
|
||||
|
||||
# Group events by type
|
||||
event_types = {}
|
||||
for event in self.collected_events:
|
||||
event_type = event.get("event_type", "unknown")
|
||||
if event_type not in event_types:
|
||||
event_types[event_type] = 0
|
||||
event_types[event_type] += 1
|
||||
|
||||
return {
|
||||
"events_in_memory": event_count,
|
||||
"event_types": event_types,
|
||||
"api_endpoint": self.api_endpoint,
|
||||
}
|
||||
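A hedged, self-contained sketch of driving the collector above directly (for example in a test): ordinary events are buffered, and a FLUSH_SECURITY_TRACES event triggers the POST-with-retries path. The endpoint below is a placeholder, so the flush exhausts its retries, reports an error, and re-buffers the events.

import asyncio

from safety.events.handlers import SecurityEventsHandler
from safety.events.types import InternalEventType, InternalPayload
from safety.events.utils import create_internal_event


async def demo() -> None:
    handler = SecurityEventsHandler(api_endpoint="https://example.invalid/events")

    # Ordinary events are serialized with model_dump() and buffered in memory.
    ready = create_internal_event(
        event_type=InternalEventType.EVENT_BUS_READY, payload=InternalPayload()
    )
    print(await handler.handle(ready))  # {"collected": True, "event_count": 1}

    # A flush request sends the buffer; with the placeholder endpoint this
    # returns {"status": "error", ...} and puts the events back.
    flush = create_internal_event(
        event_type=InternalEventType.FLUSH_SECURITY_TRACES, payload=InternalPayload()
    )
    print(await handler.handle(flush))

    await handler.close_async()


asyncio.run(demo())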
@@ -0,0 +1,28 @@
from .aliases import (
    CloseResourcesEvent,
    CommandErrorEvent,
    CommandExecutedEvent,
    FirewallConfiguredEvent,
    FirewallDisabledEvent,
    FirewallHeartbeatEvent,
    FlushSecurityTracesEvent,
    PackageInstalledEvent,
    PackageUninstalledEvent,
    EventBusReadyEvent,
)
from .base import InternalEventType, InternalPayload

__all__ = [
    "CloseResourcesEvent",
    "FlushSecurityTracesEvent",
    "InternalEventType",
    "InternalPayload",
    "CommandExecutedEvent",
    "CommandErrorEvent",
    "PackageInstalledEvent",
    "PackageUninstalledEvent",
    "FirewallHeartbeatEvent",
    "FirewallConfiguredEvent",
    "FirewallDisabledEvent",
    "EventBusReadyEvent",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,81 @@
from typing import Literal

from safety_schemas.models.events import Event, EventType
from safety_schemas.models.events.payloads import (
    AuthCompletedPayload,
    AuthStartedPayload,
    CodebaseSetupCompletedPayload,
    CodebaseSetupResponseCreatedPayload,
    FirewallConfiguredPayload,
    FirewallDisabledPayload,
    FirewallSetupCompletedPayload,
    FirewallSetupResponseCreatedPayload,
    InitScanCompletedPayload,
    InitStartedPayload,
    PackageInstalledPayload,
    PackageUninstalledPayload,
    CommandExecutedPayload,
    CommandErrorPayload,
    FirewallHeartbeatPayload,
    CodebaseDetectionStatusPayload,
)

from .base import InternalEventType, InternalPayload

CommandExecutedEvent = Event[
    Literal[EventType.COMMAND_EXECUTED], CommandExecutedPayload
]
CommandErrorEvent = Event[Literal[EventType.COMMAND_ERROR], CommandErrorPayload]
PackageInstalledEvent = Event[
    Literal[EventType.PACKAGE_INSTALLED], PackageInstalledPayload
]
PackageUninstalledEvent = Event[
    Literal[EventType.PACKAGE_UNINSTALLED], PackageUninstalledPayload
]
FirewallHeartbeatEvent = Event[
    Literal[EventType.FIREWALL_HEARTBEAT], FirewallHeartbeatPayload
]
FirewallConfiguredEvent = Event[
    Literal[EventType.FIREWALL_CONFIGURED], FirewallConfiguredPayload
]
FirewallDisabledEvent = Event[
    Literal[EventType.FIREWALL_DISABLED], FirewallDisabledPayload
]


InitStartedEvent = Event[Literal[EventType.INIT_STARTED], InitStartedPayload]
AuthStartedEvent = Event[Literal[EventType.AUTH_STARTED], AuthStartedPayload]
AuthCompletedEvent = Event[Literal[EventType.AUTH_COMPLETED], AuthCompletedPayload]

# Firewall setup events
FirewallSetupResponseCreatedEvent = Event[
    Literal[EventType.FIREWALL_SETUP_RESPONSE_CREATED],
    FirewallSetupResponseCreatedPayload,
]
FirewallSetupCompletedEvent = Event[
    Literal[EventType.FIREWALL_SETUP_COMPLETED], FirewallSetupCompletedPayload
]

# Codebase setup events
CodebaseDetectionStatusEvent = Event[
    Literal[EventType.CODEBASE_DETECTION_STATUS], CodebaseDetectionStatusPayload
]
CodebaseSetupResponseCreatedEvent = Event[
    Literal[EventType.CODEBASE_SETUP_RESPONSE_CREATED],
    CodebaseSetupResponseCreatedPayload,
]
CodebaseSetupCompletedEvent = Event[
    Literal[EventType.CODEBASE_SETUP_COMPLETED], CodebaseSetupCompletedPayload
]

# Scan events
InitScanCompletedEvent = Event[
    Literal[EventType.INIT_SCAN_COMPLETED], InitScanCompletedPayload
]

# Internal events
CloseResourcesEvent = Event[InternalEventType.CLOSE_RESOURCES, InternalPayload]
FlushSecurityTracesEvent = Event[
    InternalEventType.FLUSH_SECURITY_TRACES, InternalPayload
]
EventBusReadyEvent = Event[Literal[InternalEventType.EVENT_BUS_READY], InternalPayload]
@@ -0,0 +1,24 @@
from typing import TYPE_CHECKING, Any, Optional
from typing_extensions import Annotated
from pydantic import ConfigDict
from safety_schemas.models.events import EventTypeBase, PayloadBase


if TYPE_CHECKING:
    pass


class InternalEventType(EventTypeBase):
    """
    Internal event types.
    """

    CLOSE_RESOURCES = "com.safetycli.close_resources"
    FLUSH_SECURITY_TRACES = "com.safetycli.flush_security_traces"
    EVENT_BUS_READY = "com.safetycli.event_bus_ready"


class InternalPayload(PayloadBase):
    ctx: Optional[Annotated[Any, "CustomContext"]] = None

    model_config = ConfigDict(extra="allow")
@@ -0,0 +1,34 @@
from .emission import (
    emit_command_error,
    emit_command_executed,
    emit_firewall_disabled,
    emit_diff_operations,
    emit_firewall_configured,
    emit_tool_command_executed,
    emit_firewall_heartbeat,
    emit_init_started,
    emit_auth_started,
    emit_auth_completed,
)

from .creation import (
    create_internal_event,
    InternalEventType,
    InternalPayload,
)

__all__ = [
    "emit_command_error",
    "emit_command_executed",
    "emit_firewall_disabled",
    "create_internal_event",
    "InternalEventType",
    "InternalPayload",
    "emit_firewall_configured",
    "emit_diff_operations",
    "emit_init_started",
    "emit_auth_started",
    "emit_auth_completed",
    "emit_tool_command_executed",
    "emit_firewall_heartbeat",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,79 @@
from functools import wraps
from typing import TYPE_CHECKING, Any, Callable, List, Optional, TypeVar, cast, overload


if TYPE_CHECKING:
    from safety.events.event_bus import EventBus
    from safety.cli_util import CustomContext


def should_emit(
    event_bus: Optional["EventBus"], ctx: Optional["CustomContext"]
) -> bool:
    """
    Common conditions that apply to all event emissions.
    """
    if event_bus is None:
        return False

    # Be aware that ctx depends on the command being parsed; if the emit func
    # is called from the entrypoint group command, ctx will not have
    # the command parsed yet.

    return True


def should_emit_firewall_heartbeat(ctx: Optional["CustomContext"]) -> bool:
    """
    Condition to check if the firewall is enabled.
    """
    if ctx and ctx.obj.firewall_enabled:
        return True

    return False


# Define TypeVars for better typing
F = TypeVar("F", bound=Callable[..., Any])
R = TypeVar("R")


@overload
def conditional_emitter(emit_func: F, *, conditions: None = None) -> F: ...


@overload
def conditional_emitter(
    emit_func: None = None,
    *,
    conditions: Optional[List[Callable[[Optional["CustomContext"]], bool]]] = None,
) -> Callable[[F], F]: ...


def conditional_emitter(
    emit_func=None,
    *,
    conditions: Optional[List[Callable[[Optional["CustomContext"]], bool]]] = None,
):
    """
    A decorator that conditionally calls the decorated function based on conditions.
    Only executes the decorated function if all conditions evaluate to True.
    """

    def decorator(func: F) -> F:
        @wraps(func)
        def wrapper(event_bus, ctx=None, *args, **kwargs):
            if not should_emit(event_bus, ctx):
                return None

            if conditions:
                if all(condition(ctx) for condition in conditions):
                    return func(event_bus, ctx, *args, **kwargs)
                return None
            return func(event_bus, ctx, *args, **kwargs)

        return cast(F, wrapper)  # Cast to help type checker

    if emit_func is None:
        return decorator
    return decorator(emit_func)
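A short usage sketch for the decorator above. emit_example is a hypothetical emitter (the real ones live in the emission module), and the import path is assumed from this commit's layout.

from safety.events.utils.conditions import (
    conditional_emitter,
    should_emit_firewall_heartbeat,
)


@conditional_emitter(conditions=[should_emit_firewall_heartbeat])
def emit_example(event_bus, ctx=None, *, note: str):
    # Reached only when an event bus exists and every condition returns True.
    print(f"would emit: {note}")


# With event_bus=None the wrapper short-circuits and returns None; with a
# running bus and ctx.obj.firewall_enabled set, the body executes.
emit_example(None, None, note="firewall heartbeat")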
@@ -0,0 +1,163 @@
|
||||
import getpass
|
||||
import os
|
||||
from pathlib import Path
|
||||
import site
|
||||
import socket
|
||||
import sys
|
||||
import platform
|
||||
from typing import List, Optional
|
||||
from safety_schemas.models.events.context import (
|
||||
ClientInfo,
|
||||
EventContext,
|
||||
HostInfo,
|
||||
OsInfo,
|
||||
ProjectInfo,
|
||||
PythonInfo,
|
||||
RuntimeInfo,
|
||||
UserInfo,
|
||||
)
|
||||
|
||||
from safety_schemas.models.events.types import SourceType
|
||||
from safety_schemas.models import ProjectModel
|
||||
|
||||
|
||||
def get_user_info() -> UserInfo:
|
||||
"""
|
||||
Collect information about the current user.
|
||||
"""
|
||||
return UserInfo(name=getpass.getuser(), home_dir=str(Path.home()))
|
||||
|
||||
|
||||
def get_os_info() -> OsInfo:
|
||||
"""
|
||||
Get basic OS information using only the platform module.
|
||||
Returns a dictionary with architecture, platform, name, version, and kernel_version.
|
||||
"""
|
||||
# Initialize with required fields
|
||||
os_info = {
|
||||
"architecture": platform.machine(),
|
||||
"platform": platform.system(),
|
||||
"name": None,
|
||||
"version": None,
|
||||
"kernel_version": None,
|
||||
}
|
||||
|
||||
python_version = sys.version_info
|
||||
|
||||
if sys.platform == "wind32":
|
||||
os_info["version"] = platform.release()
|
||||
os_info["kernel_version"] = platform.version()
|
||||
os_info["name"] = "windows"
|
||||
|
||||
elif sys.platform == "darwin":
|
||||
os_info["version"] = platform.mac_ver()[0]
|
||||
os_info["kernel_version"] = platform.release()
|
||||
os_info["name"] = "macos"
|
||||
|
||||
elif sys.platform == "linux":
|
||||
os_info["kernel_version"] = platform.release()
|
||||
if python_version >= (3, 10):
|
||||
try:
|
||||
os_release = platform.freedesktop_os_release()
|
||||
# Use ID for name (more consistent for programmatic use)
|
||||
os_info["name"] = os_release.get("ID", "linux")
|
||||
os_info["version"] = os_release.get("VERSION_ID")
|
||||
except (OSError, AttributeError):
|
||||
# If freedesktop_os_release fails, keep values as is
|
||||
pass
|
||||
|
||||
return OsInfo(**os_info)
|
||||
|
||||
|
||||
def get_host_info() -> HostInfo:
|
||||
"""
|
||||
Collect information about the host machine.
|
||||
"""
|
||||
hostname = socket.gethostname()
|
||||
|
||||
ipv4_addresses = set()
|
||||
ipv6_addresses = set()
|
||||
try:
|
||||
host_info = socket.getaddrinfo(hostname, None)
|
||||
for info in host_info:
|
||||
ip_family = info[0]
|
||||
ip = str(info[4][0])
|
||||
|
||||
if ip_family == socket.AF_INET:
|
||||
if not ip.startswith("127."):
|
||||
ipv4_addresses.add(ip)
|
||||
elif ip_family == socket.AF_INET6:
|
||||
if not ip.startswith("::1") and ip != "fe80::1":
|
||||
ipv6_addresses.add(ip)
|
||||
|
||||
# Prioritize addresses
|
||||
primary_ipv4 = next(
|
||||
(ip for ip in ipv4_addresses),
|
||||
next(iter(ipv4_addresses)) if ipv4_addresses else None,
|
||||
)
|
||||
|
||||
primary_ipv6 = next(
|
||||
(ip for ip in ipv6_addresses if not ip.startswith("fe80:")),
|
||||
next(iter(ipv6_addresses)) if ipv6_addresses else None,
|
||||
)
|
||||
|
||||
except socket.gaierror:
|
||||
primary_ipv4 = None
|
||||
primary_ipv6 = None
|
||||
|
||||
return HostInfo(name=hostname, ipv4=primary_ipv4, ipv6=primary_ipv6, timezone=None)
|
||||
|
||||
|
||||
def get_python_info() -> PythonInfo:
|
||||
"""
|
||||
Collect detailed information about the Python environment.
|
||||
"""
|
||||
# Get site-packages directories
|
||||
site_packages_dirs = site.getsitepackages()
|
||||
|
||||
user_site_enabled = bool(site.ENABLE_USER_SITE)
|
||||
user_site_packages = site.getusersitepackages()
|
||||
|
||||
return PythonInfo(
|
||||
version=f"{sys.version_info.major}.{sys.version_info.minor}",
|
||||
path=sys.executable,
|
||||
sys_path=sys.path,
|
||||
implementation=platform.python_implementation(),
|
||||
implementation_version=platform.python_version(),
|
||||
sys_prefix=sys.prefix,
|
||||
site_packages=site_packages_dirs,
|
||||
user_site_enabled=user_site_enabled,
|
||||
user_site_packages=user_site_packages,
|
||||
encoding=sys.getdefaultencoding(),
|
||||
filesystem_encoding=sys.getfilesystemencoding(),
|
||||
)
|
||||
|
||||
|
||||
def create_event_context(
|
||||
client_identifier: SourceType,
|
||||
client_version: str,
|
||||
client_path: str,
|
||||
project: Optional[ProjectModel] = None,
|
||||
tags: Optional[List[str]] = None,
|
||||
) -> EventContext:
|
||||
client = ClientInfo(
|
||||
identifier=client_identifier, version=client_version, path=client_path
|
||||
)
|
||||
|
||||
project_info = None
|
||||
|
||||
if project:
|
||||
project_info = ProjectInfo(
|
||||
id=project.id,
|
||||
url=project.url_path,
|
||||
)
|
||||
|
||||
runtime = RuntimeInfo(
|
||||
workdir=os.getcwd(),
|
||||
user=get_user_info(),
|
||||
os=get_os_info(),
|
||||
host=get_host_info(),
|
||||
python=get_python_info(),
|
||||
)
|
||||
|
||||
return EventContext(client=client, runtime=runtime, project=project_info, tags=tags)
|
||||
@@ -0,0 +1,48 @@
import time
from typing import Optional, TypeVar

from safety_schemas.models.events import Event, EventTypeBase, PayloadBase, SourceType

from safety.meta import get_identifier

from ..types import InternalEventType, InternalPayload

PayloadBaseT = TypeVar("PayloadBaseT", bound=PayloadBase)
EventTypeBaseT = TypeVar("EventTypeBaseT", bound=EventTypeBase)


def create_event(
    payload: PayloadBaseT,
    event_type: EventTypeBaseT,
    source: SourceType = SourceType(get_identifier()),
    # Resolved at call time; a default in the signature would be evaluated
    # once at import and frozen for every event.
    timestamp: Optional[int] = None,
    correlation_id: Optional[str] = None,
    **kwargs,
) -> Event[EventTypeBaseT, PayloadBaseT]:
    """
    Generic factory function for creating any type of event.
    """
    if timestamp is None:
        timestamp = int(time.time())

    return Event(
        timestamp=timestamp,
        payload=payload,
        type=event_type,
        source=source,
        correlation_id=correlation_id,
        **kwargs,
    )


def create_internal_event(
    event_type: InternalEventType,
    payload: InternalPayload,
) -> Event[InternalEventType, InternalPayload]:
    """
    Create an internal event.
    """
    return Event(
        type=event_type,
        timestamp=int(time.time()),
        source=SourceType(get_identifier()),
        payload=payload,
    )
@@ -0,0 +1,110 @@
|
||||
import re
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from click.core import ParameterSource as ClickParameterSource
|
||||
|
||||
from safety_schemas.models.events.types import ParamSource
|
||||
|
||||
|
||||
def is_sensitive_parameter(param_name: str) -> bool:
|
||||
"""
|
||||
Determine if a parameter name likely contains sensitive information.
|
||||
"""
|
||||
sensitive_patterns = [
|
||||
r"(?i)pass(word)?", # password, pass
|
||||
r"(?i)token", # token, auth_token
|
||||
r"(?i)key", # key, apikey
|
||||
r"(?i)auth", # auth, authorization
|
||||
]
|
||||
|
||||
return any(re.search(pattern, param_name) for pattern in sensitive_patterns)
|
||||
|
||||
|
||||
def scrub_sensitive_value(value: str) -> str:
|
||||
"""
|
||||
Detect if a value appears to be sensitive information based on
|
||||
specific patterns.
|
||||
"""
|
||||
if not isinstance(value, str):
|
||||
return value
|
||||
|
||||
result = value
|
||||
|
||||
if re.match(r"^-{1,2}[\w-]+$", value) and "=" not in value:
|
||||
return value
|
||||
|
||||
# Patterns to detect and replace
|
||||
patterns = [
|
||||
# This will replace ports too, but that's fine
|
||||
(r"\b\w+:\w+\b", "-:-"),
|
||||
(r"Basic\s+[A-Za-z0-9+/=]+", "Basic -"),
|
||||
(r"Bearer\s+[A-Za-z0-9._~+/=-]+", "Bearer -"),
|
||||
(r"\b[A-Za-z0-9_-]{20,}\b", "-"),
|
||||
(
|
||||
r"((?:token|api|apikey|key|auth|secret|password|access|jwt|bearer|credential|pwd)=)([^&\s]+)",
|
||||
r"\1-",
|
||||
),
|
||||
]
|
||||
|
||||
# Apply each pattern and replace matches
|
||||
for pattern, repl in patterns:
|
||||
result = re.sub(pattern, repl, result)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def clean_parameter(param_name: str, param_value: Any) -> Any:
|
||||
"""
|
||||
Scrub a parameter value if it's sensitive.
|
||||
"""
|
||||
if not isinstance(param_value, str):
|
||||
return param_value
|
||||
|
||||
if is_sensitive_parameter(param_name):
|
||||
return "-"
|
||||
|
||||
return scrub_sensitive_value(param_value)
|
||||
|
||||
|
||||
def get_command_path(ctx) -> List[str]:
|
||||
hierarchy = []
|
||||
current = ctx
|
||||
|
||||
while current is not None:
|
||||
if current.command:
|
||||
name = current.command.name
|
||||
if name == "cli":
|
||||
name = "safety"
|
||||
hierarchy.append(name)
|
||||
current = current.parent
|
||||
|
||||
# Reverse to get top-level first
|
||||
hierarchy.reverse()
|
||||
|
||||
return hierarchy
|
||||
|
||||
|
||||
def get_root_context(ctx):
|
||||
"""
|
||||
Get the top-level parent context.
|
||||
"""
|
||||
current = ctx
|
||||
while current.parent is not None:
|
||||
current = current.parent
|
||||
return current
|
||||
|
||||
|
||||
def translate_param_source(source: Optional[ClickParameterSource]) -> ParamSource:
|
||||
"""
|
||||
Translate Click's ParameterSource enum to our ParameterSource enum
|
||||
"""
|
||||
mapping = {
|
||||
ClickParameterSource.COMMANDLINE: ParamSource.COMMANDLINE,
|
||||
ClickParameterSource.ENVIRONMENT: ParamSource.ENVIRONMENT,
|
||||
ClickParameterSource.DEFAULT: ParamSource.DEFAULT,
|
||||
# In newer Click versions
|
||||
getattr(ClickParameterSource, "PROMPT", None): ParamSource.PROMPT,
|
||||
getattr(ClickParameterSource, "CONFIG_FILE", None): ParamSource.CONFIG,
|
||||
}
|
||||
|
||||
return mapping.get(source, ParamSource.UNKNOWN)
|
||||
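For illustration, here is what the scrubbing helpers above do to a few values. The module path is assumed from this commit's layout and the sample strings are made up.

from safety.events.utils.data import clean_parameter, scrub_sensitive_value

print(clean_parameter("api_key", "abc123"))            # "-" (name matches a sensitive pattern)
print(clean_parameter("target", "requirements.txt"))   # unchanged, nothing sensitive detected
print(scrub_sensitive_value("Authorization: Bearer eyJhbGciOiJIUzI1NiJ9"))
# -> "Authorization: Bearer -" (the Bearer token pattern is collapsed)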
@@ -0,0 +1,681 @@
|
||||
from concurrent.futures import Future
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Dict,
|
||||
List,
|
||||
Optional,
|
||||
Tuple,
|
||||
Union,
|
||||
)
|
||||
import uuid
|
||||
|
||||
from safety.utils.pyapp_utils import get_path, get_env
|
||||
|
||||
from safety_schemas.models.events import Event, EventType
|
||||
from safety_schemas.models.events.types import ToolType
|
||||
from safety_schemas.models.events.payloads import (
|
||||
CodebaseDetectionStatusPayload,
|
||||
CodebaseSetupCompletedPayload,
|
||||
CodebaseSetupResponseCreatedPayload,
|
||||
DependencyFile,
|
||||
FirewallConfiguredPayload,
|
||||
FirewallDisabledPayload,
|
||||
FirewallSetupCompletedPayload,
|
||||
FirewallSetupResponseCreatedPayload,
|
||||
InitExitStep,
|
||||
InitExitedPayload,
|
||||
InitScanCompletedPayload,
|
||||
PackageInstalledPayload,
|
||||
PackageUninstalledPayload,
|
||||
PackageUpdatedPayload,
|
||||
CommandExecutedPayload,
|
||||
ToolCommandExecutedPayload,
|
||||
CommandErrorPayload,
|
||||
AliasConfig,
|
||||
IndexConfig,
|
||||
ToolStatus,
|
||||
CommandParam,
|
||||
ProcessStatus,
|
||||
FirewallHeartbeatPayload,
|
||||
InitStartedPayload,
|
||||
AuthStartedPayload,
|
||||
AuthCompletedPayload,
|
||||
)
|
||||
import typer
|
||||
|
||||
from ..event_bus import EventBus
|
||||
from ..types.base import InternalEventType, InternalPayload
|
||||
|
||||
from .creation import (
|
||||
create_event,
|
||||
)
|
||||
from .data import (
|
||||
clean_parameter,
|
||||
get_command_path,
|
||||
get_root_context,
|
||||
scrub_sensitive_value,
|
||||
translate_param_source,
|
||||
)
|
||||
from .conditions import conditional_emitter, should_emit_firewall_heartbeat
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from safety.models import SafetyCLI, ToolResult
|
||||
from safety.cli_util import CustomContext
|
||||
from safety.init.types import FirewallConfigStatus
|
||||
from safety.tool.environment_diff import PackageLocation
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def send_and_flush(event_bus: "EventBus", event: Event) -> Optional[Future]:
|
||||
"""
|
||||
Emit an event and immediately flush the event bus without closing it.
|
||||
|
||||
Args:
|
||||
event_bus: The event bus to emit on
|
||||
event: The event to emit
|
||||
"""
|
||||
future = event_bus.emit(event)
|
||||
|
||||
# Create and emit flush event
|
||||
flush_payload = InternalPayload()
|
||||
flush_event = create_event(
|
||||
payload=flush_payload, event_type=InternalEventType.FLUSH_SECURITY_TRACES
|
||||
)
|
||||
|
||||
# Emit flush event and wait for it to complete
|
||||
flush_future = event_bus.emit(flush_event)
|
||||
|
||||
# Wait for both events to complete
|
||||
if future:
|
||||
try:
|
||||
future.result(timeout=0.5)
|
||||
except Exception:
|
||||
logger.error("Emit Failed %s (%s)", event.type, event.id)
|
||||
|
||||
if flush_future:
|
||||
try:
|
||||
return flush_future.result(timeout=0.5)
|
||||
except Exception:
|
||||
logger.error("Flush Failed for event %s", event.id)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@conditional_emitter(conditions=[should_emit_firewall_heartbeat])
|
||||
def emit_firewall_heartbeat(
|
||||
event_bus: "EventBus", ctx: Optional["CustomContext"], *, tools: List[ToolStatus]
|
||||
):
|
||||
payload = FirewallHeartbeatPayload(tools=tools)
|
||||
event = create_event(payload=payload, event_type=EventType.FIREWALL_HEARTBEAT)
|
||||
|
||||
event_bus.emit(event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_firewall_disabled(
|
||||
event_bus: "EventBus",
|
||||
ctx: Optional["CustomContext"] = None,
|
||||
*,
|
||||
reason: Optional[str],
|
||||
):
|
||||
payload = FirewallDisabledPayload(reason=reason)
|
||||
event = create_event(payload=payload, event_type=EventType.FIREWALL_DISABLED)
|
||||
|
||||
event_bus.emit(event)
|
||||
|
||||
|
||||
def status_to_tool_status(status: "FirewallConfigStatus") -> List[ToolStatus]:
|
||||
filtered_path = get_path()
|
||||
tools = []
|
||||
for tool_type, configs in status.items():
|
||||
alias_config = (
|
||||
configs["alias"] if isinstance(configs["alias"], AliasConfig) else None
|
||||
)
|
||||
index_config = (
|
||||
configs["index"] if isinstance(configs["index"], IndexConfig) else None
|
||||
)
|
||||
|
||||
tool = tool_type.value
|
||||
command_path = shutil.which(tool, path=filtered_path)
|
||||
reachable = False
|
||||
version = "unknown"
|
||||
|
||||
if command_path:
|
||||
args = [command_path, "--version"]
|
||||
result = subprocess.run(args, capture_output=True, text=True, env=get_env())
|
||||
|
||||
if result.returncode == 0:
|
||||
output = result.stdout
|
||||
reachable = True
|
||||
|
||||
# Extract version
|
||||
version_match = re.search(r"(\d+\.\d+(?:\.\d+)?)", output)
|
||||
if version_match:
|
||||
version = version_match.group(1)
|
||||
else:
|
||||
command_path = tool
|
||||
|
||||
tool = ToolStatus(
|
||||
type=tool_type,
|
||||
command_path=command_path,
|
||||
version=version,
|
||||
reachable=reachable,
|
||||
alias_config=alias_config,
|
||||
index_config=index_config,
|
||||
)
|
||||
tools.append(tool)
|
||||
|
||||
return tools
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_firewall_configured(
|
||||
event_bus: "EventBus",
|
||||
ctx: Optional["CustomContext"] = None,
|
||||
*,
|
||||
status: "FirewallConfigStatus",
|
||||
):
|
||||
tools = status_to_tool_status(status)
|
||||
|
||||
payload = FirewallConfiguredPayload(tools=tools)
|
||||
|
||||
event = create_event(payload=payload, event_type=EventType.FIREWALL_CONFIGURED)
|
||||
|
||||
event_bus.emit(event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_diff_operations(
|
||||
event_bus: "EventBus",
|
||||
ctx: "CustomContext",
|
||||
*,
|
||||
added: Dict["PackageLocation", str],
|
||||
removed: Dict["PackageLocation", str],
|
||||
updated: Dict["PackageLocation", Tuple[str, str]],
|
||||
tool_path: Optional[str],
|
||||
by_tool: ToolType,
|
||||
):
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
correlation_id = obj.correlation_id
|
||||
|
||||
kwargs = {
|
||||
"tool_path": tool_path,
|
||||
"tool": by_tool,
|
||||
}
|
||||
|
||||
if (added or removed or updated) and not correlation_id:
|
||||
correlation_id = obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
def emit_package_event(event_bus, correlation_id, payload, event_type):
|
||||
event = create_event(
|
||||
payload=payload,
|
||||
event_type=event_type,
|
||||
correlation_id=correlation_id,
|
||||
)
|
||||
event_bus.emit(event)
|
||||
|
||||
for package, version in added.items():
|
||||
emit_package_event(
|
||||
event_bus,
|
||||
correlation_id,
|
||||
PackageInstalledPayload(
|
||||
package_name=package.name,
|
||||
location=package.location,
|
||||
version=version,
|
||||
**kwargs,
|
||||
),
|
||||
EventType.PACKAGE_INSTALLED,
|
||||
)
|
||||
|
||||
for package, version in removed.items():
|
||||
emit_package_event(
|
||||
event_bus,
|
||||
correlation_id,
|
||||
PackageUninstalledPayload(
|
||||
package_name=package.name,
|
||||
location=package.location,
|
||||
version=version,
|
||||
**kwargs,
|
||||
),
|
||||
EventType.PACKAGE_UNINSTALLED,
|
||||
)
|
||||
|
||||
for package, (previous_version, current_version) in updated.items():
|
||||
emit_package_event(
|
||||
event_bus,
|
||||
correlation_id,
|
||||
PackageUpdatedPayload(
|
||||
package_name=package.name,
|
||||
location=package.location,
|
||||
previous_version=previous_version,
|
||||
current_version=current_version,
|
||||
**kwargs,
|
||||
),
|
||||
EventType.PACKAGE_UPDATED,
|
||||
)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_tool_command_executed(
|
||||
event_bus: "EventBus", ctx: "CustomContext", *, tool: ToolType, result: "ToolResult"
|
||||
) -> None:
|
||||
correlation_id = ctx.obj.correlation_id
|
||||
|
||||
if not correlation_id:
|
||||
correlation_id = ctx.obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
process = result.process
|
||||
|
||||
payload = ToolCommandExecutedPayload(
|
||||
tool=tool,
|
||||
tool_path=result.tool_path,
|
||||
raw_command=[clean_parameter("", arg) for arg in process.args],
|
||||
duration_ms=result.duration_ms,
|
||||
status=ProcessStatus(
|
||||
stdout=process.stdout, stderr=process.stderr, return_code=process.returncode
|
||||
),
|
||||
)
|
||||
|
||||
# Scrub after binary coercion to str
|
||||
if payload.status.stdout:
|
||||
payload.status.stdout = scrub_sensitive_value(payload.status.stdout)
|
||||
if payload.status.stderr:
|
||||
payload.status.stderr = scrub_sensitive_value(payload.status.stderr)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.TOOL_COMMAND_EXECUTED,
|
||||
)
|
||||
|
||||
event_bus.emit(event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_command_executed(
|
||||
event_bus: "EventBus", ctx: "CustomContext", *, returned_code: int
|
||||
) -> None:
|
||||
root_context = get_root_context(ctx)
|
||||
NA = ""
|
||||
|
||||
started_at = getattr(root_context, "started_at", None) if root_context else None
|
||||
if started_at is not None:
|
||||
duration_ms = int((time.monotonic() - started_at) * 1000)
|
||||
else:
|
||||
duration_ms = 1
|
||||
|
||||
command_name = ctx.command.name if ctx.command.name is not None else NA
|
||||
raw_command = [clean_parameter("", arg) for arg in sys.argv]
|
||||
|
||||
params: List[CommandParam] = []
|
||||
|
||||
for idx, param in enumerate(ctx.command.params):
|
||||
param_name = param.name if param.name is not None else NA
|
||||
param_value = ctx.params.get(param_name)
|
||||
|
||||
# Scrub the parameter value if sensitive
|
||||
scrubbed_value = clean_parameter(param_name, param_value)
|
||||
|
||||
# Determine parameter source using Click's API
|
||||
click_source = ctx.get_parameter_source(param_name)
|
||||
source = translate_param_source(click_source)
|
||||
|
||||
display_name = param_name if param_name else None
|
||||
|
||||
params.append(
|
||||
CommandParam(
|
||||
position=idx, name=display_name, value=scrubbed_value, source=source
|
||||
)
|
||||
)
|
||||
|
||||
payload = CommandExecutedPayload(
|
||||
command_name=command_name,
|
||||
command_path=get_command_path(ctx),
|
||||
raw_command=raw_command,
|
||||
parameters=params,
|
||||
duration_ms=duration_ms,
|
||||
status=ProcessStatus(
|
||||
return_code=returned_code,
|
||||
),
|
||||
)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=ctx.obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.COMMAND_EXECUTED,
|
||||
)
|
||||
|
||||
try:
|
||||
if future := event_bus.emit(event):
|
||||
future.result(timeout=0.5)
|
||||
except Exception:
|
||||
logger.error("Emit Failed %s (%s)", event.type, event.id)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_command_error(
|
||||
event_bus: "EventBus",
|
||||
ctx: "CustomContext",
|
||||
*,
|
||||
message: str,
|
||||
traceback: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit a CommandErrorEvent with sensitive data scrubbed.
|
||||
"""
|
||||
# Get command name from context if available
|
||||
command_name = getattr(ctx, "command", None)
|
||||
if command_name and command_name.name:
|
||||
command_name = command_name.name
|
||||
|
||||
scrub_traceback = None
|
||||
if traceback:
|
||||
scrub_traceback = scrub_sensitive_value(traceback)
|
||||
|
||||
command_path = get_command_path(ctx)
|
||||
raw_command = [scrub_sensitive_value(arg) for arg in sys.argv]
|
||||
|
||||
payload = CommandErrorPayload(
|
||||
command_name=command_name,
|
||||
raw_command=raw_command,
|
||||
command_path=command_path,
|
||||
error_message=scrub_sensitive_value(message),
|
||||
stacktrace=scrub_traceback,
|
||||
)
|
||||
|
||||
event = create_event(
|
||||
payload=payload,
|
||||
event_type=EventType.COMMAND_ERROR,
|
||||
)
|
||||
|
||||
event_bus.emit(event)
|
||||
|
||||
|
||||
def emit_init_started(
|
||||
event_bus: "EventBus", ctx: Union["CustomContext", typer.Context]
|
||||
) -> None:
|
||||
"""
|
||||
Emit an InitStartedEvent and store it as a pending event in SafetyCLI object.
|
||||
|
||||
Args:
|
||||
event_bus: The event bus to emit on
|
||||
ctx: The Click context containing the SafetyCLI object
|
||||
"""
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = InitStartedPayload()
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.INIT_STARTED,
|
||||
)
|
||||
|
||||
if not send_and_flush(event_bus, event):
|
||||
# Store as pending event
|
||||
obj.pending_events.append(event)
|
||||
|
||||
|
||||
def emit_auth_started(event_bus: "EventBus", ctx: "CustomContext") -> None:
|
||||
"""
|
||||
Emit an AuthStartedEvent and store it as a pending event in SafetyCLI object.
|
||||
|
||||
Args:
|
||||
event_bus: The event bus to emit on
|
||||
ctx: The Click context containing the SafetyCLI object
|
||||
"""
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = AuthStartedPayload()
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.AUTH_STARTED,
|
||||
)
|
||||
|
||||
if not send_and_flush(event_bus, event):
|
||||
# Store as pending event
|
||||
obj.pending_events.append(event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_auth_completed(
|
||||
event_bus: "EventBus",
|
||||
ctx: "CustomContext",
|
||||
*,
|
||||
success: bool = True,
|
||||
error_message: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit an AuthCompletedEvent and submit all pending events together.
|
||||
|
||||
Args:
|
||||
event_bus: The event bus to emit on
|
||||
ctx: The Click context containing the SafetyCLI object
|
||||
success: Whether authentication was successful
|
||||
error_message: Optional error message if authentication failed
|
||||
"""
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = AuthCompletedPayload(success=success, error_message=error_message)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.AUTH_COMPLETED,
|
||||
)
|
||||
|
||||
for pending_event in obj.pending_events:
|
||||
event_bus.emit(pending_event)
|
||||
|
||||
obj.pending_events.clear()
|
||||
|
||||
# Emit auth completed event and flush
|
||||
send_and_flush(event_bus, event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_firewall_setup_response_created(
|
||||
event_bus: "EventBus",
|
||||
ctx: Union["CustomContext", typer.Context],
|
||||
*,
|
||||
user_consent_requested: bool,
|
||||
user_consent: Optional[bool] = None,
|
||||
) -> None:
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = FirewallSetupResponseCreatedPayload(
|
||||
user_consent_requested=user_consent_requested, user_consent=user_consent
|
||||
)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.FIREWALL_SETUP_RESPONSE_CREATED,
|
||||
)
|
||||
|
||||
# Emit and flush
|
||||
send_and_flush(event_bus, event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_codebase_setup_response_created(
|
||||
event_bus: "EventBus",
|
||||
ctx: Union["CustomContext", typer.Context],
|
||||
*,
|
||||
user_consent_requested: bool,
|
||||
user_consent: Optional[bool] = None,
|
||||
) -> None:
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = CodebaseSetupResponseCreatedPayload(
|
||||
user_consent_requested=user_consent_requested, user_consent=user_consent
|
||||
)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.CODEBASE_SETUP_RESPONSE_CREATED,
|
||||
)
|
||||
|
||||
# Emit and flush
|
||||
send_and_flush(event_bus, event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_codebase_detection_status(
|
||||
event_bus: "EventBus",
|
||||
ctx: Union["CustomContext", typer.Context],
|
||||
*,
|
||||
detected: bool,
|
||||
detected_files: Optional[List[Path]] = None,
|
||||
) -> None:
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = CodebaseDetectionStatusPayload(
|
||||
detected=detected,
|
||||
dependency_files=[
|
||||
DependencyFile(file_path=str(file)) for file in detected_files
|
||||
]
|
||||
if detected_files
|
||||
else None,
|
||||
)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.CODEBASE_DETECTION_STATUS,
|
||||
)
|
||||
|
||||
# Emit and flush
|
||||
send_and_flush(event_bus, event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_init_scan_completed(
|
||||
event_bus: "EventBus",
|
||||
ctx: Union["CustomContext", typer.Context],
|
||||
*,
|
||||
scan_id: Optional[str],
|
||||
) -> None:
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = InitScanCompletedPayload(scan_id=scan_id)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.INIT_SCAN_COMPLETED,
|
||||
)
|
||||
|
||||
# Emit and flush
|
||||
send_and_flush(event_bus, event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_codebase_setup_completed(
|
||||
event_bus: "EventBus",
|
||||
ctx: Union["CustomContext", typer.Context],
|
||||
*,
|
||||
is_created: bool,
|
||||
codebase_id: Optional[str] = None,
|
||||
) -> None:
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = CodebaseSetupCompletedPayload(
|
||||
is_created=is_created, codebase_id=codebase_id
|
||||
)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.CODEBASE_SETUP_COMPLETED,
|
||||
)
|
||||
|
||||
# Emit and flush
|
||||
send_and_flush(event_bus, event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_firewall_setup_completed(
|
||||
event_bus: "EventBus",
|
||||
ctx: "CustomContext",
|
||||
*,
|
||||
status: "FirewallConfigStatus",
|
||||
) -> None:
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
tools = status_to_tool_status(status)
|
||||
|
||||
payload = FirewallSetupCompletedPayload(
|
||||
tools=tools,
|
||||
)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.FIREWALL_SETUP_COMPLETED,
|
||||
)
|
||||
|
||||
# Emit and flush
|
||||
send_and_flush(event_bus, event)
|
||||
|
||||
|
||||
@conditional_emitter
|
||||
def emit_init_exited(
|
||||
event_bus: "EventBus",
|
||||
ctx: Union["CustomContext", typer.Context],
|
||||
*,
|
||||
exit_step: InitExitStep,
|
||||
) -> None:
|
||||
obj: "SafetyCLI" = ctx.obj
|
||||
|
||||
if not obj.correlation_id:
|
||||
obj.correlation_id = str(uuid.uuid4())
|
||||
|
||||
payload = InitExitedPayload(exit_step=exit_step)
|
||||
|
||||
event = create_event(
|
||||
correlation_id=obj.correlation_id,
|
||||
payload=payload,
|
||||
event_type=EventType.INIT_EXITED,
|
||||
)
|
||||
|
||||
# Emit and flush
|
||||
send_and_flush(event_bus, event)
|
||||