updates
@@ -0,0 +1,54 @@
from .base import (
    STAGE_ID_MAPPING,
    AuthenticationType,
    Ecosystem,
    FileType,
    IgnoredItemDetail,
    IgnoredItems,
    PolicySource,
    ReportSchemaVersion,
    ScanType,
    Stage,
    VulnerabilitySeverityLabels,
    IgnoreCodes
)
from .config import ConfigModel, SecurityUpdates
from .file import FileModel
from .git import GITModel
from .metadata import MetadataModel
from .package import PythonDependency, PythonSpecification
from .project import PolicyFileModel, ProjectModel
from .result import DependencyResultModel
from .scan import ReportModel
from .telemetry import TelemetryModel
from .vulnerability import ClosestSecureVersion, RemediationModel, Vulnerability

__all__ = [
    "ReportSchemaVersion",
    "Ecosystem",
    "Stage",
    "ScanType",
    "STAGE_ID_MAPPING",
    "FileType",
    "MetadataModel",
    "TelemetryModel",
    "FileModel",
    "ProjectModel",
    "ReportModel",
    "PythonDependency",
    "PythonSpecification",
    "ClosestSecureVersion",
    "RemediationModel",
    "ConfigModel",
    "SecurityUpdates",
    "DependencyResultModel",
    "PolicySource",
    "PolicyFileModel",
    "AuthenticationType",
    "GITModel",
    "Vulnerability",
    "VulnerabilitySeverityLabels",
    "IgnoredItemDetail",
    "IgnoredItems",
    "IgnoreCodes",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,7 @@
from .events import EventApiPayload, EventBatchApiPayload


__all__ = [
    "EventApiPayload",
    "EventBatchApiPayload",
]
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,117 @@
from typing import ClassVar, Dict, List, Type
from typing_extensions import Annotated
from pydantic import BaseModel, BeforeValidator, ConfigDict, model_validator

from ..events import Event, EventContext
from ..events import EventType
from ..events.payloads import (
    CommandExecutedPayload,
    CommandErrorPayload,
    ToolCommandExecutedPayload,
    PackageInstalledPayload,
    PackageUninstalledPayload,
    PackageUpdatedPayload,
    FirewallHeartbeatPayload,
    FirewallConfiguredPayload,
    FirewallDisabledPayload,

    # Onboarding
    InitStartedPayload,
    AuthStartedPayload,
    AuthCompletedPayload,
    FirewallSetupResponseCreatedPayload,
    FirewallSetupCompletedPayload,
    CodebaseSetupResponseCreatedPayload,
    CodebaseSetupCompletedPayload,
    CodebaseDetectionStatusPayload,
    InitScanCompletedPayload,
    InitExitedPayload,
)
from ..events import PayloadBase


def convert_to_event_type(v):
    if isinstance(v, str):
        try:
            return EventType(v)
        except ValueError:
            pass
    return v


class EventApiPayload(Event):
    """
    Event object with added context information for the /events API endpoint.
    Extends the base Event to include information about the execution
    environment.
    """
    type: Annotated[EventType, BeforeValidator(convert_to_event_type)]
    context: EventContext

    model_config = ConfigDict(extra="allow", populate_by_name=True, strict=True)

    # Registry of event types to payload types
    payload_types: ClassVar[Dict[EventType, Type[PayloadBase]]] = {
        EventType.COMMAND_EXECUTED: CommandExecutedPayload,
        EventType.COMMAND_ERROR: CommandErrorPayload,
        EventType.TOOL_COMMAND_EXECUTED: ToolCommandExecutedPayload,
        EventType.PACKAGE_INSTALLED: PackageInstalledPayload,
        EventType.PACKAGE_UPDATED: PackageUpdatedPayload,
        EventType.PACKAGE_UNINSTALLED: PackageUninstalledPayload,
        EventType.FIREWALL_HEARTBEAT: FirewallHeartbeatPayload,
        EventType.FIREWALL_CONFIGURED: FirewallConfiguredPayload,
        EventType.FIREWALL_DISABLED: FirewallDisabledPayload,

        # Onboarding
        EventType.INIT_STARTED: InitStartedPayload,
        EventType.AUTH_STARTED: AuthStartedPayload,
        EventType.AUTH_COMPLETED: AuthCompletedPayload,
        EventType.FIREWALL_SETUP_RESPONSE_CREATED: FirewallSetupResponseCreatedPayload,
        EventType.FIREWALL_SETUP_COMPLETED: FirewallSetupCompletedPayload,
        EventType.CODEBASE_SETUP_RESPONSE_CREATED: CodebaseSetupResponseCreatedPayload,
        EventType.CODEBASE_SETUP_COMPLETED: CodebaseSetupCompletedPayload,
        EventType.CODEBASE_DETECTION_STATUS: CodebaseDetectionStatusPayload,
        EventType.INIT_SCAN_COMPLETED: InitScanCompletedPayload,
        EventType.INIT_EXITED: InitExitedPayload,
    }

    @model_validator(mode='before')
    @classmethod
    def validate_payload_type(cls, data):
        if not isinstance(data, dict):
            return data

        event_type = data.get('type')
        payload_data = data.get('payload')

        # Skip if either is missing
        if event_type is None or payload_data is None:
            return data

        # Convert to enum if it's a string
        if isinstance(event_type, str):
            try:
                event_type = EventType(event_type)
            except ValueError:
                pass

        # Get the appropriate payload class
        payload_cls = cls.payload_types.get(event_type)

        if payload_cls and isinstance(payload_data, dict):
            try:
                # Parse the payload with the appropriate model
                data['payload'] = payload_cls.model_validate(payload_data)
            except Exception as e:
                raise ValueError(f"Failed to parse payload for event type {event_type}: {e}")

        return data


class EventBatchApiPayload(BaseModel):
    """
    A batch of events for the /events API endpoint.
    Used for efficient transport of multiple events in a single
    request/response.
    """

    events: List[EventApiPayload]
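

# --- Illustrative sketch (not part of the commit) ---------------------------------
# A quick walk-through of the dispatch performed by validate_payload_type above: a
# string event type resolves to the EventType enum, the registry picks the matching
# payload model, and the raw payload dict is parsed with it. The raw values below are
# hypothetical; a full EventApiPayload would also need timestamp, source and context.
raw_type = "com.safetycli.firewall.heartbeat"
event_type = EventType(raw_type)                          # str -> EventType member
payload_cls = EventApiPayload.payload_types[event_type]   # -> FirewallHeartbeatPayload
payload = payload_cls.model_validate({"tools": []})       # parsed with the matching model
assert isinstance(payload, FirewallHeartbeatPayload)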
@@ -0,0 +1,184 @@
from datetime import date
from enum import Enum
from types import MappingProxyType
from typing import Any, Dict, List, NewType, Optional, Set

from pydantic.dataclasses import dataclass
from typing_extensions import Self

from .config_protocol import ConfigConvertible
from .report_protocol import ReportConvertible


class SafetyBaseModel(ReportConvertible):
    pass


class SafetyConfigBaseModel(ConfigConvertible):
    pass


class ReportSchemaVersion(Enum):
    v3_0 = "3.0"


class PolicyConfigSchemaVersion(Enum):
    v3_0 = "3.0"


class VulnerabilitySeverityLabels(Enum):
    UNKNOWN = "unknown"
    NONE = "none"
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    CRITICAL = "critical"


class EPSSExploitabilityLabels(Enum):
    UNKNOWN = "unknown"
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    CRITICAL = "critical"


class IgnoreCodes(Enum):
    unpinned_specification = "unpinned-specification"
    environment_dependency = "environment-dependency"
    cvss_severity = "cvss-severity"
    manual = "manual"


@dataclass
class IgnoredItemDetail:
    code: IgnoreCodes = IgnoreCodes.manual
    reason: Optional[str] = None
    expires: Optional[date] = None
    specifications: Optional[Set[Any]] = None


IgnoredItems = NewType("IgnoredItems", Dict[str, IgnoredItemDetail])


class ScanType(Enum):
    scan = "scan"
    system_scan = "system-scan"
    check = "check"

    @classmethod
    def from_command(cls, command):
        return {"project": cls.scan, "system": cls.system_scan, "check": cls.check}.get(
            command.name, None
        )


class Stage(str, Enum):
    development = "development"
    cicd = "cicd"
    production = "production"


STAGE_ID_MAPPING = MappingProxyType(
    {Stage.development: 1, Stage.cicd: 2, Stage.production: 3}
)


class AuthenticationType(str, Enum):
    TOKEN = "token"
    API_KEY = "api_key"
    NONE = "unauthenticated"

    def is_allowed_in(self, stage: Stage = Stage.development) -> bool:
        if self is AuthenticationType.NONE:
            return False

        if self is AuthenticationType.API_KEY and stage is Stage.development:
            return False

        if self is AuthenticationType.TOKEN and stage is not Stage.development:
            return False

        return True


class FileType(Enum):
    REQUIREMENTS_TXT = "requirements.txt"
    POETRY_LOCK = "poetry.lock"
    PIPENV_LOCK = "Pipfile.lock"
    SAFETY_PROJECT = ".safety-project.ini"
    VIRTUAL_ENVIRONMENT = "pyvenv.cfg"
    PYPROJECT_TOML = "pyproject.toml"

    @property
    def ecosystem(self):
        if self in (
            FileType.REQUIREMENTS_TXT,
            FileType.POETRY_LOCK,
            FileType.PIPENV_LOCK,
            FileType.VIRTUAL_ENVIRONMENT,
            FileType.PYPROJECT_TOML,
        ):
            return Ecosystem.PYTHON
        if self is FileType.SAFETY_PROJECT:
            return Ecosystem.SAFETY_PROJECT

        return Ecosystem.UNKNOWN

    def human_name(self, plural: bool = False):
        if self is FileType.POETRY_LOCK:
            return "Python poetry lock files" if plural else "Python poetry lock file"

        if self is FileType.PIPENV_LOCK:
            return "Python Pipfile lock files" if plural else "Python Pipfile lock file"

        if self is FileType.REQUIREMENTS_TXT:
            return "Python requirements files" if plural else "Python requirement file"

        if self is FileType.VIRTUAL_ENVIRONMENT:
            return "Python environments" if plural else "Python environment"

        if self is FileType.SAFETY_PROJECT:
            return "Safety projects" if plural else "Safety project"

        if self is FileType.PYPROJECT_TOML:
            return "Python pyproject.toml files" if plural else "Python pyproject.toml file"


class Ecosystem(Enum):
    PYTHON = "python"
    SAFETY_PROJECT = "safety_project"
    UNKNOWN = "unknown"

    @property
    def file_types(self) -> List[FileType]:
        if self is Ecosystem.PYTHON:
            return [
                FileType.REQUIREMENTS_TXT,
                FileType.POETRY_LOCK,
                FileType.PIPENV_LOCK,
                FileType.VIRTUAL_ENVIRONMENT,
                FileType.PYPROJECT_TOML,
            ]
        if self is Ecosystem.SAFETY_PROJECT:
            return [FileType.SAFETY_PROJECT]

        return []

    @classmethod
    def scannable(cls) -> List["Ecosystem"]:
        return [cls.PYTHON]


class PolicySource(Enum):
    local = "local"
    cloud = "cloud"


class InstallationAction(Enum):
    allow = "allow"
    deny = "deny"


class PackageEcosystem(Enum):
    pip = "pip"
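

# --- Illustrative sketch (not part of the commit) ---------------------------------
# How the enums above compose: file types resolve to an ecosystem, stages map to
# numeric ids, and authentication types are gated per stage.
assert FileType("requirements.txt") is FileType.REQUIREMENTS_TXT
assert FileType.POETRY_LOCK.ecosystem is Ecosystem.PYTHON
assert STAGE_ID_MAPPING[Stage.cicd] == 2
assert AuthenticationType.TOKEN.is_allowed_in(Stage.development) is True
assert AuthenticationType.API_KEY.is_allowed_in(Stage.development) is False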
@@ -0,0 +1,595 @@
import importlib
import json
from dataclasses import field
from datetime import date
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Set, Optional, Union

from pydantic.dataclasses import dataclass

from .util import dict_dump

from ..config.schemas.v3_0 import main as v3_0
from .base import (
    EPSSExploitabilityLabels,
    IgnoredItemDetail,
    IgnoredItems,
    PolicyConfigSchemaVersion,
    SafetyConfigBaseModel,
    VulnerabilitySeverityLabels,
    FileType,
    InstallationAction,
    PackageEcosystem,
)
from .ecosystem import PythonEcosystemIgnoreConfigModel


@dataclass
class ScanConfigModel:
    max_depth: int = 6
    ignore: List[str] = field(default_factory=lambda: [])
    include_files: Dict[FileType, List[Path]] = field(default_factory=lambda: {})
    system_targets: List[str] = field(default_factory=lambda: [])


@dataclass
class FailConfig:
    enabled: bool = True
    cvss_severity: List[VulnerabilitySeverityLabels] = field(
        default_factory=lambda: [
            VulnerabilitySeverityLabels.CRITICAL,
            VulnerabilitySeverityLabels.HIGH,
            VulnerabilitySeverityLabels.MEDIUM,
        ]
    )
    exploitability: List[EPSSExploitabilityLabels] = field(
        default_factory=lambda: [
            EPSSExploitabilityLabels.CRITICAL,
            EPSSExploitabilityLabels.HIGH,
            EPSSExploitabilityLabels.MEDIUM,
        ]
    )


@dataclass
class SecurityUpdates:
    class UpdateLevel(Enum):
        MAJOR = "major"
        MINOR = "minor"
        PATCH = "patch"

    auto_security_updates_limit: List[UpdateLevel] = field(
        default_factory=lambda: [SecurityUpdates.UpdateLevel.PATCH]
    )


@dataclass
class DependencyVulnerabilityConfig:
    enabled: bool = True
    ignore_vulnerabilities: Optional[IgnoredItems] = None
    ignore_cvss_severity: List[VulnerabilitySeverityLabels] = field(
        default_factory=lambda: []
    )
    python_ignore: PythonEcosystemIgnoreConfigModel = field(
        default_factory=lambda: PythonEcosystemIgnoreConfigModel()
    )
    fail_on: FailConfig = field(default_factory=lambda: FailConfig())
    security_updates: SecurityUpdates = field(default_factory=lambda: SecurityUpdates())


@dataclass
class AuditLoggingConfig:
    enabled: bool = True


@dataclass
class PackageDefinition:
    ecosystem: PackageEcosystem
    specifications: List[str] = field(default_factory=lambda: [])


@dataclass
class VulnerabilityDefinition:
    reason: Optional[str] = None
    expires: Optional[date] = None


@dataclass
class AllowedInstallationConfig:
    packages: List[PackageDefinition] = field(default_factory=lambda: [])
    vulnerabilities: Dict[str, VulnerabilityDefinition] = field(default_factory=lambda: {})


@dataclass
class DeniedPackagesCriteria:
    malicious: bool = True
    age_below: Optional[str] = None
    packages: List[PackageDefinition] = field(default_factory=lambda: [])


@dataclass
class DeniedPackagesConfig:
    warn: Optional[DeniedPackagesCriteria] = None
    block: Optional[DeniedPackagesCriteria] = None


@dataclass
class DeniedVulnerabilityCriteria:
    cvss_severities: List[VulnerabilitySeverityLabels] = field(default_factory=lambda: [])


@dataclass
class DeniedVulnerabilityConfig:
    warn: DeniedVulnerabilityCriteria = field(
        default_factory=DeniedVulnerabilityCriteria
    )
    block: DeniedVulnerabilityCriteria = field(
        default_factory=DeniedVulnerabilityCriteria
    )


@dataclass
class DeniedInstallationConfig:
    packages: DeniedPackagesConfig = field(default_factory=DeniedPackagesConfig)
    vulnerabilities: DeniedVulnerabilityConfig = field(
        default_factory=DeniedVulnerabilityConfig
    )


@dataclass
class InstallationConfig:
    default_action: InstallationAction = InstallationAction.allow
    audit_logging: AuditLoggingConfig = field(default_factory=AuditLoggingConfig)
    allow: AllowedInstallationConfig = field(default_factory=AllowedInstallationConfig)
    deny: DeniedInstallationConfig = field(default_factory=DeniedInstallationConfig)


@dataclass
class ConfigModel(SafetyConfigBaseModel):
    telemetry_enabled: bool = True
    scan: ScanConfigModel = field(default_factory=lambda: ScanConfigModel())
    depedendency_vulnerability: DependencyVulnerabilityConfig = field(
        default_factory=lambda: DependencyVulnerabilityConfig()
    )
    installation: InstallationConfig = field(
        default_factory=lambda: InstallationConfig()
    )

    def as_v30(self, *args: Any, **kwargs: Any) -> v3_0.SchemaModelV30:
        include_files = []
        for file_type, paths in self.scan.include_files.items():
            include_files.extend(
                [
                    v3_0.IncludeFile(
                        file_type=v3_0.AllowedFileType(file_type.value), path=str(p)
                    )
                    for p in paths
                ]
            )

        scan_config = v3_0.ScanSettings(
            max_depth=self.scan.max_depth,
            exclude=list(self.scan.ignore),
            include_files=include_files,
            system=v3_0.System(targets=self.scan.system_targets),
        )
        ignored_data: Optional[IgnoredItems] = (
            self.depedendency_vulnerability.ignore_vulnerabilities
        )
        ignored_vulns = None

        if ignored_data:
            ignored_vulns = {
                id: v3_0.IgnoredVulnerability(
                    reason=details.reason,
                    expires=details.expires,  # type: ignore
                    specifications=details.specifications,
                )  # type: ignore
                for id, details in ignored_data.items()
            }

        ignore_severities = [
            v3_0.CVSSSeverityLabels(label.value)
            for label in self.depedendency_vulnerability.ignore_cvss_severity
        ]

        python_config = v3_0.PythonEcosystemSettings(
            ignore_environment_results=self.depedendency_vulnerability.python_ignore.environment_results,
            ignore_unpinned_requirements=self.depedendency_vulnerability.python_ignore.unpinned_specifications,
        )

        auto_ignore = v3_0.AutoIgnoreInReportDependencyVulnerabilities(
            python=python_config,
            vulnerabilities=ignored_vulns,
            cvss_severity=ignore_severities,
        )

        report_on_config = v3_0.Report(
            dependency_vulnerabilities=v3_0.ReportDependencyVulnerabilities(
                enabled=self.depedendency_vulnerability.enabled, auto_ignore=auto_ignore
            )
        )

        update_limit = [
            v3_0.SecurityUpdatesLimits(label.value)
            for label in self.depedendency_vulnerability.security_updates.auto_security_updates_limit  # noqa: E501
        ]

        updates = v3_0.SecurityUpdatesSettings(
            dependency_vulnerabilities=v3_0.SecurityUpdatesDependencyVulnerabilities(
                auto_security_updates_limit=update_limit
            )
        )

        fail_on_severity = [
            v3_0.CVSSSeverityLabels(label.value)
            for label in self.depedendency_vulnerability.fail_on.cvss_severity
        ]

        fail_on_exploitability = [
            v3_0.EPSSExploitabilityLabels(label.value)
            for label in self.depedendency_vulnerability.fail_on.exploitability
        ]

        fail_scan = v3_0.FailScan(
            dependency_vulnerabilities=v3_0.FailScanDependencyVulnerabilities(
                enabled=self.depedendency_vulnerability.fail_on.enabled,
                fail_on_any_of=v3_0.FailOnAnyOf(
                    cvss_severity=fail_on_severity,
                    exploitability=fail_on_exploitability,
                ),
            )
        )

        allowed_packages = self.__map_model_packages(self.installation.allow.packages)
        allowed_vulnerabilities = self.__map_model_vulnerabilities(
            self.installation.allow.vulnerabilities
        )
        warn_vulnerabilities = self.__map_model_cvss_severities(
            self.installation.deny.vulnerabilities.warn
        )
        block_vulnerabilities = self.__map_model_cvss_severities(
            self.installation.deny.vulnerabilities.block
        )

        installation = v3_0.Installation(
            default_action=v3_0.InstallationAction(
                self.installation.default_action.value
            ),
            audit_logging=v3_0.AuditLogging(
                enabled=self.installation.audit_logging.enabled
            ),
            allow=v3_0.AllowedInstallation(
                packages=allowed_packages, vulnerabilities=allowed_vulnerabilities
            ),
            deny=v3_0.DeniedInstallation(
                packages=v3_0.DeniedPackage(
                    warning_on_any_of=self.__map_to_denied_package_criteria(self.installation.deny.packages.warn),
                    block_on_any_of=self.__map_to_denied_package_criteria(self.installation.deny.packages.block),
                ),
                vulnerabilities=v3_0.DeniedVulnerability(
                    warning_on_any_of=v3_0.DeniedVulnerabilityCriteria(cvss_severity=warn_vulnerabilities),
                    block_on_any_of=v3_0.DeniedVulnerabilityCriteria(cvss_severity=block_vulnerabilities),
                ),
            ),
        )

        return v3_0.Config(
            scan=scan_config,
            report=report_on_config,
            fail_scan=fail_scan,
            security_updates=updates,
            installation=installation,
        )

    @classmethod
    def from_v30(cls, obj: v3_0.SchemaModelV30) -> "ConfigModel":
        if not isinstance(obj, v3_0.Config):
            raise TypeError("Expected instance of v3_0.Config")

        scan = ScanConfigModel()
        dep_vuln = DependencyVulnerabilityConfig()
        installation = InstallationConfig()

        if obj.scan:
            if obj.scan.max_depth:
                scan.max_depth = obj.scan.max_depth

            if obj.scan.exclude:
                scan.ignore = obj.scan.exclude

            if obj.scan.include_files:
                for include_file in obj.scan.include_files:
                    file_type = FileType(include_file.file_type.value)

                    if file_type not in scan.include_files:
                        scan.include_files[file_type] = []

                    scan.include_files[file_type].append(Path(include_file.path))

            if obj.scan.system and obj.scan.system.targets:
                scan.system_targets = obj.scan.system.targets

        if obj.report and obj.report.dependency_vulnerabilities:
            if obj.report.dependency_vulnerabilities.enabled:
                dep_vuln.enabled = obj.report.dependency_vulnerabilities.enabled

            auto_ignore = obj.report.dependency_vulnerabilities.auto_ignore

            if auto_ignore:
                vulns_to_ignore = auto_ignore.vulnerabilities

                if vulns_to_ignore:
                    dep_vuln.ignore_vulnerabilities = IgnoredItems(
                        {
                            vuln_id: IgnoredItemDetail(**dict_dump(ignore_details))
                            for vuln_id, ignore_details in vulns_to_ignore.items()
                        }
                    )

                if auto_ignore.python:
                    kwargs = {}

                    if auto_ignore.python.ignore_unpinned_requirements is not None:
                        kwargs["unpinned_specifications"] = bool(
                            auto_ignore.python.ignore_unpinned_requirements
                        )

                    if auto_ignore.python.ignore_environment_results is not None:
                        kwargs["environment_results"] = bool(
                            auto_ignore.python.ignore_environment_results
                        )

                    dep_vuln.python_ignore = PythonEcosystemIgnoreConfigModel(**kwargs)

                if auto_ignore.cvss_severity:
                    dep_vuln.ignore_cvss_severity = [
                        VulnerabilitySeverityLabels(label.value)
                        for label in auto_ignore.cvss_severity
                    ]

        if obj.fail_scan and obj.fail_scan.dependency_vulnerabilities:
            fail_on = obj.fail_scan.dependency_vulnerabilities

            if fail_on.enabled is not None:
                dep_vuln.fail_on.enabled = bool(fail_on.enabled)

            if fail_on.fail_on_any_of:
                if fail_on.fail_on_any_of.cvss_severity:
                    dep_vuln.fail_on.cvss_severity = [
                        VulnerabilitySeverityLabels(label.value)
                        for label in fail_on.fail_on_any_of.cvss_severity
                    ]

                if fail_on.fail_on_any_of.exploitability:
                    dep_vuln.fail_on.exploitability = [
                        EPSSExploitabilityLabels(label.value)
                        for label in fail_on.fail_on_any_of.exploitability
                    ]

        if obj.security_updates and obj.security_updates.dependency_vulnerabilities:
            auto_security_limits = obj.security_updates.dependency_vulnerabilities.auto_security_updates_limit

            if auto_security_limits:
                dep_vuln.security_updates = SecurityUpdates(
                    [
                        SecurityUpdates.UpdateLevel(level.value)
                        for level in auto_security_limits
                    ]
                )

        if obj.installation:
            installation.default_action = InstallationAction(
                obj.installation.default_action.value
            )

            if obj.installation.audit_logging:
                installation.audit_logging = AuditLoggingConfig(
                    obj.installation.audit_logging.enabled
                )

            if obj.installation.allow:
                installation.allow = AllowedInstallationConfig()

                if obj.installation.allow.packages:
                    installation.allow.packages = ConfigModel.__map_schema_packages(
                        obj.installation.allow.packages
                    )

                if obj.installation.allow.vulnerabilities:
                    installation.allow.vulnerabilities = (
                        ConfigModel.__map_schema_vulnerabilities(
                            obj.installation.allow.vulnerabilities
                        )
                    )

            if obj.installation.deny:
                installation.deny = DeniedInstallationConfig()

                if obj.installation.deny.packages:
                    installation.deny.packages = DeniedPackagesConfig()
                    if obj.installation.deny.packages.warning_on_any_of:
                        installation.deny.packages.warn = (
                            ConfigModel.__map_schema_denied_packages(
                                obj.installation.deny.packages.warning_on_any_of
                            )
                        )

                    if obj.installation.deny.packages.block_on_any_of:
                        installation.deny.packages.block = (
                            ConfigModel.__map_schema_denied_packages(
                                obj.installation.deny.packages.block_on_any_of
                            )
                        )

                if obj.installation.deny.vulnerabilities:
                    installation.deny.vulnerabilities = DeniedVulnerabilityConfig()
                    if obj.installation.deny.vulnerabilities.warning_on_any_of:
                        installation.deny.vulnerabilities.warn = (
                            ConfigModel.__map_schema_denied_vulnerabilities(
                                obj.installation.deny.vulnerabilities.warning_on_any_of
                            )
                        )

                    if obj.installation.deny.vulnerabilities.block_on_any_of:
                        installation.deny.vulnerabilities.block = (
                            ConfigModel.__map_schema_denied_vulnerabilities(
                                obj.installation.deny.vulnerabilities.block_on_any_of
                            )
                        )

        return ConfigModel(
            scan=scan, depedendency_vulnerability=dep_vuln, installation=installation
        )

    @classmethod
    def parse_policy_file(
        cls,
        raw_report: Union[str, Path],
        schema: PolicyConfigSchemaVersion = PolicyConfigSchemaVersion.v3_0,
    ) -> "ConfigModel":
        if isinstance(raw_report, Path):
            raw_report = raw_report.expanduser().resolve()
            with open(raw_report) as f:
                raw_report = f.read()

        try:
            from ruamel.yaml import YAML

            yaml = YAML(typ="safe", pure=True)
            yml_raw = yaml.load(raw_report)
        except Exception:
            raise ValueError("Broken YAML file.")

        parse = "parse_obj"
        target_schema = schema.value.replace(".", "_")
        module_name = (
            "safety_schemas." "config.schemas." f"v{target_schema}.main"
        )  # Example: Selecting v1_1

        module = importlib.import_module(module_name)
        config_model = module.Config

        # This will raise a validation error if the content is wrong
        validated_policy_file = getattr(config_model, parse)(yml_raw)

        # TODO: Select the from_* converter based on the schema version passed
        return ConfigModel.from_v30(obj=validated_policy_file)

    def save_policy_file(self, dest: Path):
        POLICY_NAME = ".safety-policy.yml"

        dest = dest.expanduser().resolve()
        if dest.is_dir():
            dest = dest / POLICY_NAME
        policy_config = self.as_v30().json(by_alias=True, exclude_none=True)

        from ruamel.yaml.emitter import Emitter

        class MyEmitter(Emitter):
            def expect_block_mapping_key(self, first=False):
                if len(self.indents) == 1 and not first:
                    self.write_line_break()
                    self.write_line_break()
                super().expect_block_mapping_key(first)

        try:
            from ruamel.yaml import YAML

            yaml = YAML(typ="safe", pure=True)
            yaml.default_flow_style = False
            yaml.sort_base_mapping_type_on_output = False
            yaml.indent(mapping=2, sequence=4, offset=2)
            yaml.Emitter = MyEmitter

            with open(dest, "w") as f:
                yaml.dump(json.loads(policy_config), f)

        except Exception as e:
            raise ValueError(f"Unable to generate or save YAML, {e}")

    @staticmethod
    def __map_model_packages(
        packages: List[PackageDefinition],
    ) -> List[v3_0.PackageDefinition]:
        return [
            v3_0.PackageDefinition(
                ecosystem=v3_0.PackageEcosystem(package.ecosystem.value),
                specifications=package.specifications,
            )
            for package in packages
        ]

    @staticmethod
    def __map_model_vulnerabilities(
        vulnerabilities: Dict[str, VulnerabilityDefinition],
    ) -> Dict[str, v3_0.IgnoredVulnerability]:
        return {
            id: v3_0.IgnoredVulnerability(
                reason=vulnerability.reason, expires=vulnerability.expires
            )
            for id, vulnerability in vulnerabilities.items()
        }

    @staticmethod
    def __map_model_cvss_severities(
        vulnerabilities: DeniedVulnerabilityCriteria,
    ) -> List[v3_0.CVSSSeverityLabels]:
        return [
            v3_0.CVSSSeverityLabels(severity.value)
            for severity in vulnerabilities.cvss_severities
        ]

    @classmethod
    def __map_schema_denied_packages(
        cls, package_criteria: v3_0.DeniedPackageCriteria
    ) -> DeniedPackagesCriteria:
        result = DeniedPackagesCriteria()
        result.malicious = package_criteria.malicious
        result.age_below = package_criteria.age_below
        result.packages = ConfigModel.__map_schema_packages(package_criteria.packages)
        return result

    @classmethod
    def __map_schema_denied_vulnerabilities(
        cls, vulnerability_criteria: v3_0.DeniedVulnerabilityCriteria
    ) -> DeniedVulnerabilityCriteria:
        result = DeniedVulnerabilityCriteria()
        result.cvss_severities = ConfigModel.__map_schema_vulnerability_severities(
            vulnerability_criteria
        )
        return result

    @classmethod
    def __map_schema_packages(
        cls, packages: List[v3_0.PackageDefinition]
    ) -> List[PackageDefinition]:
        return [
            PackageDefinition(
                PackageEcosystem(package.ecosystem.value),
                package.specifications,
            )
            for package in packages
        ]

    @classmethod
    def __map_schema_vulnerabilities(
        cls, vulnerabilities: Dict[str, v3_0.IgnoredVulnerability]
    ) -> Dict[str, VulnerabilityDefinition]:
        return {
            id: VulnerabilityDefinition(vuln.reason, vuln.expires)
            for id, vuln in vulnerabilities.items()
        }

    @classmethod
    def __map_schema_vulnerability_severities(
        cls, vulnerabilities: v3_0.DeniedVulnerabilityCriteria
    ) -> List[VulnerabilitySeverityLabels]:
        return [
            VulnerabilitySeverityLabels(severity.value)
            for severity in vulnerabilities.cvss_severity
        ]

    @classmethod
    def __map_to_denied_package_criteria(
        cls, package_criteria: Optional[DeniedPackagesCriteria]
    ) -> Optional[v3_0.DeniedPackageCriteria]:
        if package_criteria is None:
            return None

        return v3_0.DeniedPackageCriteria(
            malicious=package_criteria.malicious,
            age_below=package_criteria.age_below,
            packages=cls.__map_model_packages(package_criteria.packages),
        )
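

# --- Illustrative sketch (not part of the commit) ---------------------------------
# The intended round trip for policy files, assuming the v3.0 schema accepts the
# defaults produced by ConfigModel(); the /tmp path is a placeholder and error
# handling is omitted.
config = ConfigModel()                      # defaults: telemetry on, PATCH auto-updates, ...
schema_v30 = config.as_v30()                # export to the v3.0 policy schema
config.save_policy_file(Path("/tmp"))       # a directory gets ".safety-policy.yml" appended
restored = ConfigModel.parse_policy_file(Path("/tmp/.safety-policy.yml"))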
@@ -0,0 +1,24 @@
from __future__ import annotations

import abc
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .base import SafetyConfigBaseModel

from ..config.schemas.v3_0 import main as v3_0

NOT_IMPLEMENTED_ERROR_MSG = (
    "Needs implementation for the specific schema version export."
)


class ConfigConvertible(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def as_v30(self) -> v3_0.SchemaModelV30:
        raise NotImplementedError(NOT_IMPLEMENTED_ERROR_MSG)

    @classmethod
    @abc.abstractmethod
    def from_v30(cls, obj: v3_0.SchemaModelV30) -> SafetyConfigBaseModel:
        raise NotImplementedError(NOT_IMPLEMENTED_ERROR_MSG)
@@ -0,0 +1,12 @@
from pydantic.dataclasses import dataclass


@dataclass
class EcosystemIgnoreConfigModel:
    pass


@dataclass
class PythonEcosystemIgnoreConfigModel(EcosystemIgnoreConfigModel):
    unpinned_specifications: bool = True
    environment_results: bool = True
@@ -0,0 +1,15 @@
from .base import Event, PayloadBase
from .context import EventContext

from .types import EventTypeBase, EventType, ParamSource, SourceType


__all__ = [
    "Event",
    "PayloadBase",
    "EventContext",
    "EventTypeBase",
    "EventType",
    "ParamSource",
    "SourceType",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,39 @@
from typing import Generic, Optional, TypeVar
from typing_extensions import Annotated
import uuid
from pydantic import UUID4, BaseModel, BeforeValidator, Field

from .types import EventTypeBase, SourceType


class PayloadBase(BaseModel):
    """
    Base class for all event payloads
    """
    pass


# Generics
PayloadT = TypeVar("PayloadT", bound=PayloadBase)
EventTypeT = TypeVar("EventTypeT", bound=EventTypeBase)


def convert_to_source_type(v):
    if isinstance(v, str):
        try:
            return SourceType(v)
        except ValueError:
            pass
    return v


class Event(BaseModel, Generic[EventTypeT, PayloadT]):
    id: UUID4 = Field(default_factory=lambda: uuid.uuid4())
    timestamp: int
    type: EventTypeT
    source: Annotated[SourceType, BeforeValidator(convert_to_source_type)]
    correlation_id: Optional[str] = Field(
        description="Unique identifier for tracing related events",
        default=None,
    )
    payload: PayloadT
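

# --- Illustrative sketch (not part of the commit) ---------------------------------
# A concrete specialization of the generic Event model. "PingPayload", "PingEventType"
# and the literal values are hypothetical; the real specializations live in the
# events.types and events.payloads modules added in this commit.
class PingPayload(PayloadBase):
    message: str


class PingEventType(EventTypeBase):
    PING = "com.example.ping"


ping = Event[PingEventType, PingPayload](
    timestamp=1700000000,
    type=PingEventType.PING,
    source="urn:safetycli:cli:pypi",  # coerced to SourceType by the before-validator
    payload=PingPayload(message="hello"),
)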
@@ -0,0 +1,9 @@
SAFETY_NAMESPACE = "safetycli"
PRODUCT_CLI = "cli"
GITHUB = "github"
PYPI = "pypi"
DOCKER = "docker"
ACTION = "action"
APP = "app"

CLI_SOURCE = f"urn:{SAFETY_NAMESPACE}:{PRODUCT_CLI}"
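
# Illustrative note (not part of the commit): with the values above, the base URN is
# "urn:safetycli:cli"; SourceType in types.py appends the channel, e.g. ":pypi" or
# ":github:action".
assert CLI_SOURCE == "urn:safetycli:cli"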
@@ -0,0 +1,114 @@
from pydantic import BaseModel, Field
from typing import Optional, List

from .types import SourceType

from typing_extensions import Annotated
from pydantic.types import StringConstraints


class ClientInfo(BaseModel):
    """
    Information about the client application.
    """

    identifier: SourceType = Field(description="Client source identifier name")
    version: str = Field(description="Client application version")
    path: str = Field(description="Path to the client executable")


class ProjectInfo(BaseModel):
    """
    Information about the project context.
    """

    id: str = Field(default="unknown", description="Project identifier")
    url: Optional[str] = Field(default=None, description="Project URL")


class UserInfo(BaseModel):
    """
    Information about the user.
    """

    name: str = Field(description="Username")
    home_dir: str = Field(description="User's home directory")


class OsInfo(BaseModel):
    """
    Information about the operating system.
    """
    architecture: Annotated[str, StringConstraints(to_lower=True)] = Field(description="Machine architecture")
    platform: Annotated[str, StringConstraints(to_lower=True)] = Field(description="Operating system platform")
    name: Annotated[Optional[str], StringConstraints(to_lower=True)] = Field(description="Operating system name")
    version: Annotated[Optional[str], StringConstraints(to_lower=True)] = Field(description="Operating system version")
    kernel_version: Annotated[Optional[str], StringConstraints(to_lower=True)] = Field(
        default=None, description="Kernel version if available"
    )


class HostInfo(BaseModel):
    """
    Information about the host machine.
    """

    name: str = Field(description="Hostname")
    ipv4: Optional[str] = Field(default=None, description="IPv4 address")
    ipv6: Optional[str] = Field(default=None, description="IPv6 address")
    timezone: Optional[str] = Field(default=None, description="Timezone")


class PythonInfo(BaseModel):
    """
    Detailed information about the Python environment.
    """

    version: str = Field(description="Python version (major.minor)")
    path: str = Field(description="Path to the Python executable")
    sys_path: List[str] = Field(description="Python sys.path")
    implementation: Optional[str] = Field(
        default=None, description="Python implementation (e.g., 'CPython')"
    )
    implementation_version: Optional[str] = Field(
        default=None, description="Python implementation version"
    )

    sys_prefix: str = Field(description="sys.prefix location")
    site_packages: List[str] = Field(description="List of site-packages directories")
    user_site_enabled: bool = Field(
        description="Whether user site-packages are enabled for imports"
    )
    user_site_packages: Optional[str] = Field(
        default=None, description="User site-packages directory path if available"
    )

    encoding: str = Field(description="Default string encoding")
    filesystem_encoding: str = Field(description="Filesystem encoding")


class RuntimeInfo(BaseModel):
    """
    Information about the runtime environment.
    """

    workdir: str = Field(description="Working directory")
    user: UserInfo = Field(description="User information")
    os: OsInfo = Field(description="Operating system information")
    host: HostInfo = Field(description="Host information")
    python: Optional[PythonInfo] = Field(default=None, description="Python information")


class EventContext(BaseModel):
    """
    Complete context information for an event.
    Contains details about the client, project, and runtime environment.
    """

    client: ClientInfo = Field(description="Client application information")
    runtime: RuntimeInfo = Field(description="Runtime environment information")
    project: Optional[ProjectInfo] = Field(
        default=None, description="Project information"
    )
    tags: Optional[List[str]] = Field(
        default=None, description="Event tags for categorization"
    )
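

# --- Illustrative sketch (not part of the commit) ---------------------------------
# A minimal EventContext; every literal below is a hypothetical placeholder.
context = EventContext(
    client=ClientInfo(
        identifier=SourceType.SAFETY_CLI_PYPI,
        version="3.0.0",
        path="/usr/local/bin/safety",
    ),
    runtime=RuntimeInfo(
        workdir="/workspace/project",
        user=UserInfo(name="dev", home_dir="/home/dev"),
        os=OsInfo(architecture="x86_64", platform="linux", name="ubuntu", version="22.04"),
        host=HostInfo(name="builder-01"),
    ),
)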
@@ -0,0 +1,68 @@
from .main import (
    CommandParam,
    CommandExecutedPayload,
    CommandErrorPayload,
    PackagePayloadBase,
    SingleVersionPackagePayload,
    ToolCommandExecutedPayload,
    PackageInstalledPayload,
    PackageUninstalledPayload,
    PackageUpdatedPayload,
    HealthCheckResult,
    IndexConfig,
    AliasConfig,
    ToolStatus,
    FirewallConfiguredPayload,
    FirewallDisabledPayload,
    FirewallHeartbeatPayload,
    ProcessStatus,
)

from .onboarding import (
    InitStartedPayload,
    AuthStartedPayload,
    AuthCompletedPayload,
    FirewallSetupResponseCreatedPayload,
    FirewallSetupCompletedPayload,
    CodebaseSetupResponseCreatedPayload,
    CodebaseSetupCompletedPayload,
    DependencyFile,
    CodebaseDetectionStatusPayload,
    InitScanCompletedPayload,
    InitExitStep,
    InitExitedPayload,
)

__all__ = [
    "CommandParam",
    "CommandExecutedPayload",
    "CommandErrorPayload",
    "PackagePayloadBase",
    "SingleVersionPackagePayload",
    "PackageInstalledPayload",
    "PackageUninstalledPayload",
    "PackageUpdatedPayload",
    "HealthCheckResult",
    "IndexConfig",
    "AliasConfig",
    "ToolStatus",
    "FirewallConfiguredPayload",
    "FirewallDisabledPayload",
    "FirewallHeartbeatPayload",
    "ProcessStatus",
    "ToolCommandExecutedPayload",

    # Onboarding
    "InitStartedPayload",
    "AuthStartedPayload",
    "AuthCompletedPayload",
    "FirewallSetupResponseCreatedPayload",
    "FirewallSetupCompletedPayload",
    "CodebaseSetupResponseCreatedPayload",
    "CodebaseSetupCompletedPayload",
    "DependencyFile",
    "CodebaseDetectionStatusPayload",
    "InitScanCompletedPayload",
    "InitExitStep",
    "InitExitedPayload",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,223 @@
from typing import Any, List, Optional
from pydantic import BaseModel, Field

from ..base import PayloadBase
from ..types import LimitedStr, ParamSource, StackTrace, StdErr, StdOut, ToolType


class CommandParam(BaseModel):
    position: int = Field(description="Position in the original command")
    name: Optional[LimitedStr] = Field(
        default=None, description="Name of the option, None for positional arguments"
    )
    value: Any = Field(description="Value of the argument or option")
    source: ParamSource = Field(
        ParamSource.UNKNOWN,
        description="Source of the parameter value (commandline, environment, config, default, prompt)",
    )

    @property
    def is_option(self) -> bool:
        """
        Return True if this is a named option, False if positional argument
        """
        return self.name is not None


class ProcessStatus(BaseModel):
    stdout: Optional[StdOut] = Field(
        default=None, description="Standard output of the process"
    )
    stderr: Optional[StdErr] = Field(
        default=None, description="Standard error of the process"
    )
    return_code: int = Field(description="Return code of the process")


class CommandExecutedPayload(PayloadBase):
    command_name: str = Field(
        description="Primary command name (e.g., 'status', 'scan')"
    )
    command_path: List[LimitedStr] = Field(
        description="Command path as a list (e.g., ['safety', 'auth', 'login'])"
    )
    raw_command: List[LimitedStr] = Field(
        description="Complete command as a list (equivalent to sys.argv)"
    )
    parameters: List[CommandParam] = Field(
        description="Parameters defined by us", default_factory=list
    )
    duration_ms: int = Field(
        gt=0,
        description="Execution time in milliseconds for the full command "
        "including any tool call",
    )
    status: ProcessStatus = Field(
        description="Status data (stdout/stderr/return_code) when applicable"
    )


class ToolCommandExecutedPayload(PayloadBase):
    """
    Information about a wrapped command execution.
    """
    tool: ToolType = Field(
        description="Tool Type (e.g., 'pip', 'uv', 'poetry', 'npm')"
    )
    tool_path: Optional[str] = Field(default=None, description="Absolute path to the tool's executable")
    raw_command: List[LimitedStr] = Field(
        description="Complete command as a list (equivalent to sys.argv)"
    )
    duration_ms: int = Field(
        gt=0,
        description="Execution time in milliseconds",
    )
    status: ProcessStatus = Field(
        description="Status data (stdout/stderr/return_code) when applicable"
    )


class CommandErrorPayload(PayloadBase):
    command_name: Optional[LimitedStr] = Field(
        description="Name of the command that failed"
    )
    command_path: Optional[List[LimitedStr]] = Field(
        description="Command path as a list (e.g., ['safety', 'auth', 'login'])"
    )
    raw_command: List[LimitedStr] = Field(
        description="Complete command as a list (equivalent to sys.argv)"
    )
    error_message: str = Field(description="Error message")
    stacktrace: Optional[StackTrace] = Field(
        default=None, description="Stack trace if available"
    )


class PackagePayloadBase(PayloadBase):
    package_name: str = Field(description="Name of the package")
    tool: ToolType = Field(description="ToolType used (e.g., pip, conda)")
    tool_path: Optional[str] = Field(default=None, description="Absolute path to the tool's executable")
    location: Optional[str] = Field(default=None, description="Location of the package")


class SingleVersionPackagePayload(PackagePayloadBase):
    version: str = Field(description="Version of the package")


class PackageInstalledPayload(SingleVersionPackagePayload):
    pass


class PackageUninstalledPayload(SingleVersionPackagePayload):
    pass


class PackageUpdatedPayload(PackagePayloadBase):
    previous_version: str = Field(description="Previous package version")
    current_version: str = Field(description="Current package version")


class HealthCheckResult(BaseModel):
    """
    Generic health check result structure.
    """

    is_alive: bool = Field(description="Whether the entity is alive and responding")
    response_time_ms: Optional[int] = Field(
        None, description="Response time in milliseconds"
    )
    error_message: Optional[LimitedStr] = Field(
        None, description="Error message if any"
    )
    timestamp: str = Field(description="When the health check was performed")


class IndexConfig(BaseModel):
    """
    Configuration details for the package index.
    """

    is_configured: bool = Field(
        description="Whether the index configuration is in place"
    )
    index_url: Optional[LimitedStr] = Field(
        default=None, description="URL of the configured package index"
    )
    health_check: Optional[HealthCheckResult] = Field(
        default=None, description="Health check for the index"
    )


class AliasConfig(BaseModel):
    """
    Configuration details for the command alias.
    """

    is_configured: bool = Field(description="Whether the alias is configured")
    alias_content: Optional[LimitedStr] = Field(
        default=None, description="Content of the alias"
    )
    health_check: Optional[HealthCheckResult] = Field(
        default=None, description="Health check for the alias"
    )


class ToolStatus(BaseModel):
    """
    Status of a single package manager tool. A tool is identified by its
    executable path.
    """

    type: ToolType = Field(description="Tool type")
    command_path: str = Field(description="Absolute path to the tool's executable")
    version: str = Field(description="Version of the tool")
    reachable: bool = Field(
        description="Whether the tool's package manager is reachable bypassing any firewall setup"
    )

    # Configuration information
    alias_config: Optional[AliasConfig] = Field(
        default=None, description="Details about the alias configuration"
    )
    index_config: Optional[IndexConfig] = Field(
        default=None, description="Details about the index configuration"
    )

    @property
    def alias_configured(self) -> bool:
        """
        Whether the alias is configured.
        """
        return self.alias_config is not None and self.alias_config.is_configured

    @property
    def index_configured(self) -> bool:
        """
        Whether the index is configured.
        """
        return self.index_config is not None and self.index_config.is_configured

    @property
    def is_configured(self) -> bool:
        """
        Returns whether the tool is fully configured (both alias and index).
        """
        return self.alias_configured and self.index_configured


class FirewallConfiguredPayload(PayloadBase):
    tools: List[ToolStatus] = Field(
        description="Status of all detected package manager tools"
    )


class FirewallDisabledPayload(PayloadBase):
    reason: Optional[LimitedStr] = Field(
        description="Reason for disabling the firewall"
    )


class FirewallHeartbeatPayload(PayloadBase):
    tools: List[ToolStatus] = Field(
        description="Status of all detected package manager tools"
    )
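

# --- Illustrative sketch (not part of the commit) ---------------------------------
# Recording a wrapped tool invocation; the command and timing values are hypothetical.
tool_event = ToolCommandExecutedPayload(
    tool=ToolType.PIP,
    tool_path="/usr/bin/pip",
    raw_command=["pip", "install", "requests"],
    duration_ms=420,
    status=ProcessStatus(return_code=0),
)

param = CommandParam(position=0, name="--json", value=True, source=ParamSource.COMMANDLINE)
assert param.is_option  # it has a name, so it is an option rather than a positional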
@@ -0,0 +1,136 @@
from typing import List, Optional, Dict, Any
from pydantic import BaseModel, Field

from ..base import PayloadBase
from ..types import LimitedStr, ToolType
from .main import ToolStatus

from enum import Enum


class InitStartedPayload(PayloadBase):
    """
    Payload for the Init Started event.
    This is emitted when the init command is started.
    Note: This event is typically delayed until the user completes authentication.
    """
    # This is an empty payload as the timestamp is already in the event
    pass


class AuthStartedPayload(PayloadBase):
    """
    Payload for the Auth Started event.
    This is emitted when the authentication flow is initiated and a URL is shown to the user.
    """
    auth_url: Optional[LimitedStr] = Field(
        default=None, description="URL provided to the user for authentication"
    )


class AuthCompletedPayload(PayloadBase):
    """
    Payload for the Auth Completed event.
    This is emitted when the authentication flow is completed.
    """
    success: bool = Field(description="Whether authentication was successful")
    error_message: Optional[LimitedStr] = Field(
        default=None, description="Error message if authentication failed"
    )


class FirewallSetupResponseCreatedPayload(PayloadBase):
    """
    Payload for the Firewall Setup Response Created event.
    This captures the user's choice to install the firewall (Y/N).
    """
    user_consent_requested: bool = Field(
        description="Whether the user was asked for consent to install the firewall"
    )
    user_consent: Optional[bool] = Field(
        default=None, description="User's consent to install the firewall (True for yes, False for no, None if unknown)"
    )


class FirewallSetupCompletedPayload(PayloadBase):
    """
    Payload for the Firewall Setup Completed event.
    This is emitted when the firewall is configured. This payload has the current status of all tools.
    """
    tools: List[ToolStatus] = Field(
        description="Status of all configured package manager tools"
    )


class DependencyFile(BaseModel):
    """
    Information about a detected dependency file.
    """
    file_path: str = Field(description="Path to the detected dependency file")


class CodebaseDetectionStatusPayload(PayloadBase):
    """
    Payload for the Codebase Detection Status event.
    This is emitted when the codebase is detected.
    """
    detected: bool = Field(description="Whether a codebase was detected")
    dependency_files: Optional[List[DependencyFile]] = Field(
        default=None, description="List of detected dependency files"
    )


class CodebaseSetupResponseCreatedPayload(PayloadBase):
    """
    Payload for the Codebase Setup Response Created event.
    This captures the user's choice to add a codebase (Y/N).
    """
    user_consent_requested: bool = Field(
        description="Whether the user was asked for consent to add a codebase"
    )
    user_consent: Optional[bool] = Field(
        default=None, description="User's consent to add a codebase (True for yes, False for no, None if unknown)"
    )


class CodebaseSetupCompletedPayload(PayloadBase):
    """
    Payload for the Codebase Setup Completed event.
    This is emitted when a codebase is successfully created or verified.
    """
    is_created: bool = Field(description="Whether the codebase was created")
    codebase_id: Optional[str] = Field(default=None, description="ID of the codebase")


class InitScanCompletedPayload(PayloadBase):
    """
    Payload for the Init Scan Completed event.
    This is emitted when the initial scan completes.
    """
    scan_id: Optional[str] = Field(default=None, description="ID of the completed scan")


class InitExitStep(str, Enum):
    """
    Possible steps where the init process could be exited.
    """
    PRE_AUTH = "pre_authentication"
    POST_AUTH = "post_authentication"
    PRE_FIREWALL_SETUP = "pre_firewall_setup"
    POST_FIREWALL_SETUP = "post_firewall_setup"
    PRE_CODEBASE_SETUP = "pre_codebase_setup"
    POST_CODEBASE_SETUP = "post_codebase_setup"
    PRE_SCAN = "pre_scan"
    POST_SCAN = "post_scan"
    COMPLETED = "completed"
    UNKNOWN = "unknown"


class InitExitedPayload(PayloadBase):
    """
    Payload for the Init Exited event.
    This is emitted when the user exits the init process (e.g., via Ctrl+C).
    """
    exit_step: InitExitStep = Field(
        description="The last step known before the user exited"
    )
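

# --- Illustrative sketch (not part of the commit) ---------------------------------
# Building a few onboarding payloads; the file path is a hypothetical placeholder.
consent = FirewallSetupResponseCreatedPayload(user_consent_requested=True, user_consent=True)
detection = CodebaseDetectionStatusPayload(
    detected=True,
    dependency_files=[DependencyFile(file_path="requirements.txt")],
)
exited = InitExitedPayload(exit_step=InitExitStep.POST_SCAN)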
@@ -0,0 +1,187 @@
from enum import Enum
from functools import partial
from typing import Any, Optional, Union

from pydantic import BeforeValidator
from typing_extensions import Annotated

from .constants import CLI_SOURCE, GITHUB, ACTION, PYPI, DOCKER, APP


class SourceType(str, Enum):
    """
    Define the source types using URN format for product identification.
    """

    SAFETY_CLI_GITHUB_ACTION = f"{CLI_SOURCE}:{GITHUB}:{ACTION}"
    SAFETY_CLI_PYPI = f"{CLI_SOURCE}:{PYPI}"
    SAFETY_CLI_DOCKER = f"{CLI_SOURCE}:{DOCKER}"
    SAFETY_CLI_GITHUB_APP = f"{CLI_SOURCE}:{GITHUB}:{APP}"

    @property
    def description(self) -> str:
        """
        Return a human-readable description for this source type.
        """
        descriptions = {
            self.SAFETY_CLI_GITHUB_ACTION: "Safety CLI via GitHub Action",
            self.SAFETY_CLI_PYPI: "Safety CLI via Python Package Index (PyPI)",
            self.SAFETY_CLI_DOCKER: "Safety CLI via Docker",
            self.SAFETY_CLI_GITHUB_APP: "Safety CLI via GitHub App",
        }
        return descriptions[self]

    @classmethod
    def choices(cls):
        """
        Return this Enum as choices format (value, display_name).
        """
        return [(item.value, item.description) for item in cls]


class EventTypeBase(str, Enum):
    """
    Base class for all event types
    """

    pass


class EventType(EventTypeBase):
    """
    Enumeration for different types of events.
    """

    COMMAND_ERROR = "com.safetycli.command.error"
    COMMAND_EXECUTED = "com.safetycli.command.executed"
    TOOL_COMMAND_EXECUTED = "com.safetycli.tool.command.executed"
    PACKAGE_INSTALLED = "com.safetycli.package.installed"
    PACKAGE_UPDATED = "com.safetycli.package.updated"
    PACKAGE_UNINSTALLED = "com.safetycli.package.uninstalled"
    PACKAGE_BLOCKED = "com.safetycli.package.blocked"
    FIREWALL_HEARTBEAT = "com.safetycli.firewall.heartbeat"
    FIREWALL_CONFIGURED = "com.safetycli.firewall.configured"
    FIREWALL_DISABLED = "com.safetycli.firewall.disabled"

    INIT_STARTED = "com.safetycli.init.started"
    AUTH_STARTED = "com.safetycli.auth.started"
    AUTH_COMPLETED = "com.safetycli.auth.completed"
    FIREWALL_SETUP_RESPONSE_CREATED = "com.safetycli.firewall.setup.response.created"
    FIREWALL_SETUP_COMPLETED = "com.safetycli.firewall.setup.completed"
    CODEBASE_DETECTION_STATUS = "com.safetycli.codebase.detection.status"
    CODEBASE_SETUP_RESPONSE_CREATED = "com.safetycli.codebase.setup.response.created"
    CODEBASE_SETUP_COMPLETED = "com.safetycli.codebase.setup.completed"
    INIT_SCAN_COMPLETED = "com.safetycli.init.scan.completed"
    INIT_EXITED = "com.safetycli.init.exited"

    @property
    def description(self) -> str:
        """
        Return a human-readable description for this event type.
        """
        descriptions = {
            self.COMMAND_ERROR: "Command Error",
            self.COMMAND_EXECUTED: "Command Executed",
            self.TOOL_COMMAND_EXECUTED: "Tool Command Executed",
            self.PACKAGE_INSTALLED: "Package Installed",
            self.PACKAGE_UPDATED: "Package Updated",
            self.PACKAGE_UNINSTALLED: "Package Uninstalled",
            self.PACKAGE_BLOCKED: "Package Blocked",
            self.FIREWALL_HEARTBEAT: "Firewall Heartbeat",
            self.FIREWALL_CONFIGURED: "Firewall Configured",
            self.FIREWALL_DISABLED: "Firewall Disabled",
            self.INIT_STARTED: "Init Started",
            self.AUTH_STARTED: "Auth Started",
            self.AUTH_COMPLETED: "Auth Completed",
            self.FIREWALL_SETUP_RESPONSE_CREATED: "Firewall Setup Response Created",
            self.FIREWALL_SETUP_COMPLETED: "Firewall Setup Completed",
            self.CODEBASE_DETECTION_STATUS: "Codebase Detection Status",
            self.CODEBASE_SETUP_RESPONSE_CREATED: "Codebase Setup Response Created",
            self.CODEBASE_SETUP_COMPLETED: "Codebase Setup Completed",
            self.INIT_SCAN_COMPLETED: "Init Scan Completed",
            self.INIT_EXITED: "Init Exited",
        }
        return descriptions[self]

    @classmethod
    def choices(cls):
        """
        Return this Enum as choices format (value, display_name).
        """
        return [(item.value, item.description) for item in cls]


class ParamSource(str, Enum):
    """
    Matches Click's parameter sources
    """

    COMMANDLINE = "commandline"
    ENVIRONMENT = "environment"
    CONFIG = "config"
    DEFAULT = "default"
    PROMPT = "prompt"

    # Useful for tracking when we couldn't determine the source
    UNKNOWN = "unknown"


class ToolType(str, Enum):
    """
    Supported tools.
    """

    PIP = "pip"
    POETRY = "poetry"
    UV = "uv"
    CONDA = "conda"
    NPM = "npm"


DEFAULT_MAX_BYTES: int = 32 * 1024  # 32 KB
DEFAULT_ENCODING = "utf-8"


def truncate_by_chars(
    value: Union[str, bytes, Any],
    max_chars: int,
    encoding: str = DEFAULT_ENCODING,
) -> str:
    """
    Truncates a value to a maximum number of characters.
    """
    # Convert to string if needed
    if isinstance(value, bytes):
        value = value.decode(encoding, errors="replace")
    elif not isinstance(value, str):
        value = str(value)

    return value[:max_chars]


def truncate_by_bytes(
    value: Union[str, bytes, Any],
    max_bytes: int,
    encoding: str = DEFAULT_ENCODING,
) -> str:
    """
    Truncates a value to a maximum byte size.
    """
    # Convert to bytes if needed
    if not isinstance(value, bytes):
        value = str(value).encode(encoding, errors="replace")

    # Truncate and convert back to string
    return value[:max_bytes].decode(encoding, errors="ignore")


StdOut = Annotated[
    str, BeforeValidator(partial(truncate_by_bytes, max_bytes=DEFAULT_MAX_BYTES))
]
StdErr = Annotated[
    str, BeforeValidator(partial(truncate_by_bytes, max_bytes=DEFAULT_MAX_BYTES))
]
StackTrace = Annotated[
    str, BeforeValidator(partial(truncate_by_bytes, max_bytes=DEFAULT_MAX_BYTES))
]

LimitedStr = Annotated[str, BeforeValidator(partial(truncate_by_chars, max_chars=200))]
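A sketch of how the truncating aliases and the enum helpers might be used; the CommandOutput model below is hypothetical and not part of this commit, it only assumes pydantic v2 (which the Annotated/BeforeValidator usage already implies).

# Hypothetical model built on the aliases above.
from pydantic import BaseModel

class CommandOutput(BaseModel):
    stdout: StdOut
    stderr: StdErr

big = "x" * (64 * 1024)
out = CommandOutput(stdout=big, stderr="")
assert len(out.stdout.encode("utf-8")) <= DEFAULT_MAX_BYTES  # truncated to 32 KB

# Enum helpers expose (value, display_name) pairs, e.g. for CLI choices:
print(EventType.choices()[0])  # ('com.safetycli.command.error', 'Command Error')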
@@ -0,0 +1,142 @@
from dataclasses import asdict, field
from pathlib import Path
from typing import List

from pydantic.dataclasses import dataclass
from typing_extensions import Self

from .util import dict_dump
from ..report.schemas.v3_0 import main as v3_0
from .base import FileType, SafetyBaseModel
from .package import PythonDependency, PythonSpecification
from .result import DependencyResultModel
from .vulnerability import ClosestSecureVersion, RemediationModel, Vulnerability


@dataclass
class FileModel(SafetyBaseModel):
    location: Path
    file_type: FileType
    results: DependencyResultModel = field(
        default_factory=lambda: DependencyResultModel(dependencies=[])
    )

    def as_v30(self) -> v3_0.File:
        dependencies_output = []

        for dep in self.results.dependencies:
            specs: List[v3_0.AnalyzedSpecification] = []
            for specification in dep.specifications:
                rem = None

                if specification.remediation:
                    closest = None
                    if specification.remediation.closest_secure:
                        closest_kwargs = asdict(
                            specification.remediation.closest_secure
                        )
                        closest = v3_0.ClosestSecureSpecification(**closest_kwargs)

                    rem = v3_0.Remediation(
                        vulnerabilities_found=specification.remediation.vulnerabilities_found,
                        closest_secure=closest,
                        recommended=specification.remediation.recommended,
                        other_recommended=specification.remediation.other_recommended,
                    )

                analyzed = v3_0.AnalyzedSpecification(
                    raw=specification.raw,
                    vulnerabilities=v3_0.SpecificationVulnerabilities(
                        known_vulnerabilities=[
                            v3_0.Vulnerability(**vuln.to_model_dict())
                            for vuln in specification.vulnerabilities
                        ],
                        remediation=rem,
                    ),
                )
                specs.append(analyzed)

            p = v3_0.Package(name=dep.name, specifications=specs)
            dependencies_output.append(p)

        return v3_0.File(
            location=str(self.location),
            type=self.file_type.value,
            categories=[self.file_type.ecosystem.value],
            results=v3_0.Results(dependencies=dependencies_output),
        )

    @classmethod
    def from_v30(cls, obj: v3_0.SchemaModelV30) -> 'FileModel':
        if not isinstance(obj, v3_0.File):
            raise TypeError('Expected instance of v3_0.File')

        location = Path(obj.location)

        dependencies: List[PythonDependency] = []

        for dep in obj.results.dependencies:
            specs: List[PythonSpecification] = []
            for specification in dep.specifications:
                remediation_obj = None
                remed = specification.vulnerabilities.remediation
                if remed:
                    closest_sec = None

                    if remed.closest_secure:
                        closest_sec = ClosestSecureVersion(
                            **dict_dump(remed.closest_secure)
                        )

                    remediation_obj = RemediationModel(
                        vulnerabilities_found=remed.vulnerabilities_found,
                        more_info_url="",
                        recommended=remed.recommended,
                        closest_secure=closest_sec,
                        other_recommended=remed.other_recommended,
                    )

                vulns: List[Vulnerability] = []

                py_spec = PythonSpecification(specification.raw, found=location)

                for vuln in specification.vulnerabilities.known_vulnerabilities:
                    ignored = False
                    ignored_expires = None
                    ignored_reason = None
                    ignored_code = None

                    if vuln.ignored:
                        ignored = True
                        ignored_expires = vuln.ignored.expires
                        ignored_reason = vuln.ignored.reason
                        ignored_code = vuln.ignored.code

                    vulns.append(
                        Vulnerability(
                            vulnerability_id=vuln.id,
                            package_name=py_spec.name,
                            ignored=ignored,
                            ignored_reason=ignored_reason,
                            ignored_expires=ignored_expires,
                            ignored_code=ignored_code,
                            vulnerable_spec=vuln.vulnerable_spec,
                        )
                    )

                py_spec.remediation = remediation_obj
                py_spec.vulnerabilities = vulns
                specs.append(py_spec)

            version = PythonDependency.find_version(specifications=specs)
            dependencies.append(
                PythonDependency(
                    name=dep.name, version=version, specifications=specs, found=location
                )
            )

        results = DependencyResultModel(dependencies=dependencies)

        return FileModel(
            location=location, file_type=FileType(obj.type), results=results
        )
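A hedged sketch of the export path for a file with no findings; FileType.REQUIREMENTS_TXT is assumed to be a member of the FileType enum in .base, and the path is made up.

# Sketch only: FileType.REQUIREMENTS_TXT is an assumed enum member.
empty_file = FileModel(
    location=Path("requirements.txt"),
    file_type=FileType.REQUIREMENTS_TXT,
)

# results defaults to an empty DependencyResultModel, so the v3.0 export
# should carry no dependencies.
exported = empty_file.as_v30()
assert exported.results.dependencies == []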
@@ -0,0 +1,12 @@
from typing import Optional

from pydantic.dataclasses import dataclass


@dataclass
class GITModel:
    branch: Optional[str] = None
    tag: Optional[str] = None
    commit: Optional[str] = None
    dirty: Optional[bool] = None
    origin: Optional[str] = None
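A small illustration of the all-optional shape of GITModel; the branch and commit values are invented. ProjectModel.as_v30 later feeds this object through dataclasses.asdict, which is what the snippet mirrors.

# Illustration: partial git metadata is fine, unset fields stay None.
from dataclasses import asdict

git = GITModel(branch="main", commit="3f2a1bc", dirty=False)
print(asdict(git))
# {'branch': 'main', 'tag': None, 'commit': '3f2a1bc', 'dirty': False, 'origin': None}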
@@ -0,0 +1,86 @@
from datetime import datetime
from pathlib import Path
from typing import Any, List, Union

from pydantic.dataclasses import dataclass
from typing_extensions import Self

from ..report.schemas.v3_0 import main as v3_0
from .base import (
    AuthenticationType,
    ReportSchemaVersion,
    SafetyBaseModel,
    ScanType,
    Stage,
)
from .telemetry import TelemetryModel


@dataclass
class MetadataModel(SafetyBaseModel):
    """
    Main data about the report, used for traceability purposes.
    """

    scan_type: ScanType
    stage: Stage
    scan_locations: List[Path]
    authenticated: bool
    authentication_type: AuthenticationType
    telemetry: TelemetryModel
    schema_version: ReportSchemaVersion
    timestamp: datetime = datetime.now()

    def as_v30(self, *args: Any, **kwargs: Any) -> v3_0.SchemaModelV30:
        auth_method = None

        if self.authentication_type is AuthenticationType.API_KEY:
            auth_method = v3_0.AuthenticationMethod.api_key
        elif self.authentication_type is AuthenticationType.TOKEN:
            auth_method = v3_0.AuthenticationMethod.token

        return v3_0.Meta(
            scan_type=v3_0.ScanType(self.scan_type.value),
            stage=v3_0.StageType(self.stage.value),
            scan_locations=[str(location) for location in self.scan_locations],
            authenticated=self.authenticated,
            authentication_method=auth_method,
            timestamp=self.timestamp,
            telemetry=self.telemetry.as_v30(),
            schema_version=self.schema_version.value,
        )

    @classmethod
    def from_v30(cls, obj: Union[v3_0.SchemaModelV30, dict]) -> "MetadataModel":
        # Allow obj to be a dict or an instance of v3_0.Meta.
        if isinstance(obj, dict):
            # Check for a missing authentication_method and set the default.
            auth_value = obj.get("authentication_method")
            if auth_value is None:
                auth_value = "api_key" if obj.get("api_key", False) else "token"
            obj["authentication_method"] = auth_value

            # Create a v3_0.Meta instance from the dict.
            meta_obj = (
                v3_0.Meta.model_validate(obj)
                if hasattr(v3_0.Meta, "model_validate")
                else v3_0.Meta(**obj)
            )
        else:
            meta_obj = obj
            auth_value = meta_obj.authentication_method
            if auth_value is None:
                auth_value = (
                    "api_key" if getattr(meta_obj, "api_key", False) else "token"
                )

        return MetadataModel(
            scan_type=ScanType(meta_obj.scan_type.value),
            stage=Stage(meta_obj.stage.value),
            scan_locations=[Path(location) for location in meta_obj.scan_locations],
            authenticated=meta_obj.authenticated,
            authentication_type=AuthenticationType(auth_value),
            telemetry=TelemetryModel.from_v30(meta_obj.telemetry),
            schema_version=ReportSchemaVersion(meta_obj.schema_version),
            timestamp=meta_obj.timestamp,
        )
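The dict branch of from_v30 back-fills a missing authentication_method before validating; a self-contained restatement of that rule (the dict content here is made up and the remaining Meta fields are intentionally left out):

# The defaulting rule in isolation, mirroring from_v30's dict branch.
obj = {"api_key": True}  # no "authentication_method" provided
auth_value = obj.get("authentication_method")
if auth_value is None:
    auth_value = "api_key" if obj.get("api_key", False) else "token"
obj["authentication_method"] = auth_value
print(obj["authentication_method"])  # api_key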
@@ -0,0 +1,137 @@
import abc
from dataclasses import field
from pathlib import Path
from typing import List, Optional

from pydantic.dataclasses import dataclass

from .specification import PythonSpecification

NOT_IMPLEMENTED_ERROR_MSG = (
    "Needs implementation for the specific " "specification type."
)


@dataclass
class Dependency:
    name: str
    version: Optional[str]
    specifications: List[PythonSpecification]
    found: Optional[Path] = None
    absolute_path: Optional[Path] = None
    insecure_versions: List[str] = field(default_factory=lambda: [])
    secure_versions: List[str] = field(default_factory=lambda: [])
    latest_version_without_known_vulnerabilities: Optional[str] = None
    latest_version: Optional[str] = None
    more_info_url: Optional[str] = None

    def has_unpinned_specification(self):
        for specification in self.specifications:
            if not specification.is_pinned():
                return True
        return False

    def get_unpinned_specificaitons(self):
        return filter(
            lambda specification: not specification.is_pinned(), self.specifications
        )

    @abc.abstractmethod
    def filter_by_supported_versions(self, versions: List[str]) -> List[str]:
        raise NotImplementedError()

    @abc.abstractmethod
    def get_versions(self, db_full):
        raise NotImplementedError()

    @abc.abstractmethod
    def refresh_from(self, db_full):
        raise NotImplementedError()

    def to_dict(self, **kwargs):
        if kwargs.get("short_version", False):
            return {
                "name": self.name,
                "version": self.version,
                "requirements": self.specifications,
            }

        return {
            "name": self.name,
            "version": self.version,
            "requirements": self.specifications,
            "found": None,
            "insecure_versions": self.insecure_versions,
            "secure_versions": self.secure_versions,
            "latest_version_without_known_vulnerabilities": self.latest_version_without_known_vulnerabilities,  # noqa: E501
            "latest_version": self.latest_version,
            "more_info_url": self.more_info_url,
        }

    def update(self, new):
        for key, value in new.items():
            if hasattr(self, key):
                setattr(self, key, value)


@dataclass
class PythonDependency(Dependency):
    def filter_by_supported_versions(self, versions: List[str]) -> List[str]:
        from packaging.version import parse as parse_version

        allowed = []

        for version in versions:
            try:
                parse_version(version)
                allowed.append(version)
            except Exception:
                pass

        return allowed

    def get_versions(self, db_full):
        pkg_meta = db_full.get("meta", {}).get("packages", {}).get(self.name, {})
        versions = self.filter_by_supported_versions(
            pkg_meta.get("insecure_versions", []) + pkg_meta.get("secure_versions", [])
        )
        return set(versions)

    def refresh_from(self, db_full):
        from packaging.utils import canonicalize_name

        base_domain = db_full.get("meta", {}).get("base_domain")
        pkg_meta = (
            db_full.get("meta", {})
            .get("packages", {})
            .get(canonicalize_name(self.name), {})
        )

        kwargs = {
            "insecure_versions": self.filter_by_supported_versions(
                pkg_meta.get("insecure_versions", [])
            ),
            "secure_versions": self.filter_by_supported_versions(
                pkg_meta.get("secure_versions", [])
            ),
            "latest_version_without_known_vulnerabilities": pkg_meta.get(
                "latest_secure_version", None
            ),
            "latest_version": pkg_meta.get("latest_version", None),
            "more_info_url": f"{base_domain}{pkg_meta.get('more_info_path', '')}",
        }

        self.update(kwargs)

    @staticmethod
    def find_version(specifications: List[PythonSpecification]) -> Optional[str]:
        ver = None

        if len(specifications) != 1:
            return ver

        specification = specifications[0]
        if specification.is_pinned():
            ver = next(iter(specification.specifier)).version

        return ver
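A sketch of find_version, assuming PythonSpecification accepts a requirements-style string positionally (as FileModel.from_v30 already does); the package names and versions are illustrative only.

# Illustrative values; a version is only resolved for a single pinned spec.
pinned = [PythonSpecification("requests==2.31.0")]
ranged = [PythonSpecification("requests>=2.0,<3.0")]

print(PythonDependency.find_version(specifications=pinned))  # 2.31.0
print(PythonDependency.find_version(specifications=ranged))  # None (not pinned)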
@@ -0,0 +1,42 @@
from pathlib import Path
from typing import Optional

from pydantic.dataclasses import dataclass
from typing_extensions import Self

from ..report.schemas.v3_0 import main as v3_0
from .base import PolicySource, SafetyBaseModel
from .config import ConfigModel


@dataclass
class PolicyFileModel(SafetyBaseModel):
    id: str
    source: PolicySource
    location: Optional[Path]
    config: Optional[ConfigModel] = None

    def as_v30(self) -> v3_0.Policy:
        source_obj = (
            v3_0.PolicySource.local
            if self.source is PolicySource.local
            else v3_0.PolicySource.cloud
        )
        path = None

        if self.location:
            path = str(self.location.resolve())

        return v3_0.Policy(id=self.id, path=path, source=source_obj)

    @classmethod
    def from_v30(cls, obj: v3_0.SchemaModelV30) -> 'PolicyFileModel':
        if not isinstance(obj, v3_0.Policy):
            raise TypeError('Expected instance of v3_0.Policy')

        file_location = Path(obj.path) if obj.path else None

        return PolicyFileModel(
            id=obj.id, source=PolicySource(obj.source.value), location=file_location
        )
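A sketch of exporting a local policy; the id and file name are invented, and note that as_v30 resolves the location to an absolute path.

# Sketch: a local policy file, config left as None.
policy = PolicyFileModel(
    id="policy-file-abc",
    source=PolicySource.local,
    location=Path(".safety-policy.yml"),
)
exported = policy.as_v30()
print(exported.source)  # v3_0.PolicySource.local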
@@ -0,0 +1,61 @@
from dataclasses import asdict, field
from pathlib import Path
from typing import List, Optional, Union

from pydantic.dataclasses import dataclass
from typing_extensions import Self

from ..report.schemas.v3_0 import main as v3_0
from .base import SafetyBaseModel
from .file import FileModel
from .git import GITModel
from .policy_file import PolicyFileModel
from .util import dict_dump


@dataclass
class ProjectModel(SafetyBaseModel):
    id: str
    upload_request_id: Optional[str] = None
    project_path: Optional[Path] = None
    name: Optional[str] = None
    url_path: Optional[str] = None
    policy: Optional[PolicyFileModel] = None
    git: Optional[GITModel] = None
    files: List[FileModel] = field(default_factory=lambda: [])

    def as_v30(self, full: bool = True) -> Union[v3_0.Projects, v3_0.ProjectsScan]:
        if not full:
            if not self.upload_request_id:
                raise TypeError(
                    'upload_request_id is required when a single project is created'
                )
            return v3_0.Projects(id=self.id, upload_request_id=self.upload_request_id)

        if not self.project_path:
            raise TypeError('project_path is required when a project scan is created')

        git_repr = v3_0.Git(**asdict(self.git)) if self.git else None
        policy = self.policy.as_v30() if self.policy else None
        location = str(self.project_path.resolve().parent)

        return v3_0.ProjectsScan(
            id=self.id,
            policy=policy,
            git=git_repr,
            location=location,
            files=[f.as_v30() for f in self.files],
        )

    @classmethod
    def from_v30(cls, obj: v3_0.SchemaModelV30) -> 'ProjectModel':
        if not isinstance(obj, v3_0.ProjectsScan) and not isinstance(obj, v3_0.Projects):
            raise TypeError('Expected instance of v3_0.ProjectsScan or v3_0.Projects')

        if isinstance(obj, v3_0.ProjectsScan):
            git_model_inst = None

            if obj.git:
                git_model_inst = GITModel(**dict_dump(obj.git))

            return ProjectModel(
                id=obj.id,
                project_path=Path(obj.location),
                upload_request_id=obj.upload_request_id,
                git=git_model_inst,
                files=[FileModel.from_v30(f) for f in obj.files],
            )

        return ProjectModel(id=obj.id, upload_request_id=obj.upload_request_id)
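A short sketch of the two export modes; the project and request ids are made up.

# Short form: only id and upload_request_id are exported.
project = ProjectModel(id="my-project", upload_request_id="req-42")
print(project.as_v30(full=False))

# Without upload_request_id the short form is rejected:
# ProjectModel(id="my-project").as_v30(full=False)  -> raises TypeError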
@@ -0,0 +1,24 @@
from __future__ import annotations

import abc
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .base import SafetyBaseModel

from ..report.schemas.v3_0 import main as v3_0

NOT_IMPLEMENTED_ERROR_MSG = (
    "Needs implementation for the specific " "schema version export."
)


class ReportConvertible(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def as_v30(self, *args, **kwargs) -> v3_0.SchemaModelV30:
        raise NotImplementedError(NOT_IMPLEMENTED_ERROR_MSG)

    @classmethod
    @abc.abstractmethod
    def from_v30(cls, obj: v3_0.SchemaModelV30) -> SafetyBaseModel:
        raise NotImplementedError(NOT_IMPLEMENTED_ERROR_MSG)
@@ -0,0 +1,43 @@
from dataclasses import field
from typing import Dict, List

from pydantic.dataclasses import dataclass

from .base import IgnoreCodes, IgnoredItems
from .package import PythonDependency
from .specification import PythonSpecification
from .vulnerability import Vulnerability


def not_ignored(vuln):
    return not vuln.ignored


@dataclass
class DependencyResultModel:
    dependencies: List[PythonDependency]
    ignored_vulns: IgnoredItems = field(default_factory=lambda: IgnoredItems({}))
    ignored_vulns_data: Dict[str, Vulnerability] = field(default_factory=lambda: {})

    failed: bool = False

    def get_affected_specifications(
        self, include_ignored: bool = False
    ) -> List[PythonSpecification]:
        affected = []
        for dep in self.dependencies:
            affected += [
                spec
                for spec in dep.specifications
                if (
                    any(spec.vulnerabilities)
                    if include_ignored
                    else any(filter(not_ignored, spec.vulnerabilities))
                )
            ]
        return affected

    def get_affected_dependencies(self) -> List[PythonDependency]:
        affected = []
        for dep in self.dependencies:
            for spec in dep.specifications:
                if any(filter(not_ignored, spec.vulnerabilities)):
                    affected.append(dep)
                    break
        return affected
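A sketch of the ignore-aware filters with invented findings; the vulnerability ids, package, and version ranges are placeholders, and attribute assignment on the spec mirrors how FileModel.from_v30 populates it.

# Made-up data: one ignored and one active vulnerability on a single spec.
spec = PythonSpecification("jinja2==2.11.3")
spec.vulnerabilities = [
    Vulnerability(vulnerability_id="V-1", package_name="jinja2",
                  ignored=True, vulnerable_spec="<3.0"),
    Vulnerability(vulnerability_id="V-2", package_name="jinja2",
                  ignored=False, vulnerable_spec="<3.1"),
]
dep = PythonDependency(name="jinja2", version="2.11.3",
                       specifications=[spec], found=None)

result = DependencyResultModel(dependencies=[dep])
print(len(result.get_affected_specifications()))                      # 1
print(len(result.get_affected_specifications(include_ignored=True)))  # 1 (same spec)
print([d.name for d in result.get_affected_dependencies()])           # ['jinja2']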
@@ -0,0 +1,105 @@
import importlib
from dataclasses import field
from pathlib import Path
from typing import List, Union

from pydantic import ValidationError
from pydantic.dataclasses import dataclass
from typing_extensions import Self

from ..report.schemas.v3_0 import main as v3_0
from .base import ReportSchemaVersion, SafetyBaseModel, ScanType
from .file import FileModel
from .metadata import MetadataModel
from .project import ProjectModel
from .telemetry import TelemetryModel

from pydantic.version import VERSION as PYDANTIC_VERSION


def version_validator(func):
    """
    Register `func` as a validator for the `version` field, using the validator
    API that matches the installed pydantic major version.
    """
    if PYDANTIC_VERSION.startswith("1."):
        from pydantic import validator

        return validator("version", pre=True, always=True)(func)
    else:
        from pydantic import field_validator

        return field_validator("version", mode="before")(func)


@dataclass
class ReportModel(SafetyBaseModel):
    """
    Used as an entrypoint to keep backwards compatibility with old formats.
    Use this model if you want to generate a standard JSON report.
    """

    DEFAULT_SCHEMA_VERSION = ReportSchemaVersion.v3_0

    telemetry: TelemetryModel
    metadata: MetadataModel
    files: List[FileModel]
    projects: List[ProjectModel] = field(default_factory=lambda: [])
    version: ReportSchemaVersion = DEFAULT_SCHEMA_VERSION

    @version_validator
    def validate_version(cls, version: ReportSchemaVersion) -> ReportSchemaVersion:
        versions = list(ReportSchemaVersion)
        if version not in versions:
            raise ValueError(f"Invalid version, allowed versions are {versions}")
        return version

    def as_v30(self) -> v3_0.Report:
        full = self.metadata.scan_type is ScanType.scan

        projects_data = [p.as_v30(full=full) for p in self.projects]

        if not all(isinstance(p, type(projects_data[0])) for p in projects_data):
            raise ValueError("All objects in projects_data must be of the same type")

        results = v3_0.ScanResults(
            files=[f.as_v30() for f in self.files],
            projects=projects_data,  # type: ignore
        )

        meta_data = self.metadata.as_v30()

        if not isinstance(meta_data, v3_0.Meta) or not isinstance(
            results, v3_0.ScanResults
        ):
            raise TypeError("Expected instance of v3_0.Meta and v3_0.ScanResults")

        report = v3_0.Report(meta=meta_data, scan_results=results)

        return report

    @classmethod
    def from_v30(cls, obj: v3_0.SchemaModelV30) -> 'ReportModel':
        if not isinstance(obj, v3_0.Report):
            raise TypeError('Expected instance of v3_0.Report')

        return ReportModel(
            version=ReportSchemaVersion(obj.meta.schema_version),
            telemetry=TelemetryModel.from_v30(obj.meta.telemetry),
            metadata=MetadataModel.from_v30(obj.meta),
            projects=[ProjectModel.from_v30(p) for p in obj.scan_results.projects],
            files=[FileModel.from_v30(f) for f in obj.scan_results.files],
        )

    @classmethod
    def parse_report(
        cls, raw_report: Union[str, Path], schema: ReportSchemaVersion
    ) -> Union['ReportModel', ValidationError]:
        parse = "parse_raw"

        if isinstance(raw_report, Path):
            raw_report = raw_report.expanduser().resolve()
            parse = "parse_file"

        target_schema = schema.value.replace(".", "_")
        module_name = f"safety_schemas.report.schemas.v{target_schema}.main"

        module = importlib.import_module(module_name)
        report_model = module.Report

        # This will raise a validation error if the content is wrong
        validated_report = getattr(report_model, parse)(raw_report)

        # TODO: Select the from_vXX converter based on the schema version passed
        return ReportModel.from_v30(obj=validated_report)
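A hedged sketch of loading a stored report; the file path is hypothetical, and this assumes the generated schema's Report model exposes the parse_raw/parse_file classmethods that parse_report dispatches to.

# Sketch: parse_report picks parse_file for Path inputs and parse_raw for strings.
report = ReportModel.parse_report(
    raw_report=Path("safety-report.json"),
    schema=ReportSchemaVersion.v3_0,
)
print(report.metadata.scan_type, len(report.files))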
@@ -0,0 +1,158 @@
import abc
from dataclasses import InitVar, field
from pathlib import Path
from typing import List, Optional, Union

from dparse import filetypes as parse_strategy
from dparse import parse as parse_specification
from dparse.dependencies import Dependency as ParsedDependency
from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet
from pydantic import VERSION as pydantic_version
from pydantic import Field
from pydantic.dataclasses import dataclass
from typing_extensions import Annotated, ClassVar

try:
    from pydantic_core import ArgsKwargs
except ImportError:
    pass

from .vulnerability import RemediationModel, Vulnerability

NOT_IMPLEMENTED_ERROR_MSG = (
    "Needs implementation for the specific " "specification type."
)


@dataclass
class Specification(metaclass=abc.ABCMeta):
    raw: str
    found: Optional[Path]
    vulnerabilities: List[Vulnerability] = field(default_factory=lambda: [])
    remediation: Optional[RemediationModel] = None

    @abc.abstractmethod
    def is_pinned(self) -> bool:
        raise NotImplementedError(NOT_IMPLEMENTED_ERROR_MSG)

    @abc.abstractmethod
    def is_vulnerable(self, *args, **kwargs) -> bool:
        raise NotImplementedError(NOT_IMPLEMENTED_ERROR_MSG)


def get_dep(specification: Union[str, ParsedDependency]):
    _dep = specification

    if isinstance(specification, str):
        deps = parse_specification(
            specification, file_type=parse_strategy.requirements_txt
        ).dependencies
        _dep = deps[0] if deps else None

    if not isinstance(_dep, ParsedDependency):
        raise ValueError(
            f"The '{specification}' specification is "
            "not a valid Python specification."
        )

    return _dep


@dataclass(config={"arbitrary_types_allowed": True})
class PythonSpecification(Requirement, Specification):
    dep: ClassVar[Optional[ParsedDependency]] = Field(default=None, exclude=True)

    def __load_req(self, specification: Union[str, ParsedDependency]):
        self.dep = get_dep(specification)

        raw_line = self.dep.line
        to_parse = self.dep.line
        # Hashes and comments are a pip-only feature, so remove them.
        if "#" in to_parse:
            to_parse = self.dep.line.split("#")[0]

        for req_hash in self.dep.hashes:
            to_parse = to_parse.replace(req_hash, "")

        to_parse = to_parse.replace("\\", "").rstrip()

        try:
            # Try to build a PEP 508 Requirement from the cleaned line
            super().__init__(to_parse)
        except Exception:
            raise ValueError(
                f"The '{raw_line}' specification is "
                "not a valid Python specification."
            )

    if not pydantic_version.startswith("1."):
        from pydantic import model_validator

        @model_validator(mode='before')
        def pre_root(cls, values):
            args, kwargs = values.args, values.kwargs

            try:
                specification = args[0]
            except IndexError:
                raise ValueError('Specification is required')

            _dep = get_dep(specification)

            return ArgsKwargs(
                (),
                {
                    'raw': _dep.line,
                    'found': None if not kwargs else kwargs.get('found', None),
                    'dep': _dep,
                },
            )

        def __post_init__(self):
            self.__load_req(specification=self.raw)
    else:
        def __init__(
            self,
            specification: Union[str, ParsedDependency],
            found: Optional[Path] = None,
        ) -> None:
            self.__load_req(specification=specification)
            self.raw = self.dep.line
            self.found = found

    def __eq__(self, other):
        return str(self) == str(other)

    def is_pinned(self) -> bool:
        if not self.specifier or len(self.specifier) != 1:
            return False

        specifier = next(iter(self.specifier))

        return (
            specifier.operator == "==" and "*" != specifier.version[-1]
        ) or specifier.operator == "==="

    def is_vulnerable(
        self, vulnerable_spec: SpecifierSet, insecure_versions: List[str]
    ):
        if self.is_pinned():
            try:
                return vulnerable_spec.contains(
                    next(iter(self.specifier)).version, prereleases=True
                )
            except Exception:
                # Ugly for now...
                return False

        return any(
            self.specifier.filter(
                vulnerable_spec.filter(insecure_versions, prereleases=True),
                prereleases=True,
            )
        )

    def to_dict(self, **kwargs):
        specifier_obj = self.specifier
        if "specifier_obj" not in kwargs:
            specifier_obj = str(self.specifier)

        return {
            "raw": self.raw,
            "extras": list(self.extras),
            "marker": str(self.marker) if self.marker else None,
            "name": self.name,
            "specifier": specifier_obj,
            "url": self.url,
            "found": self.found,
        }
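A sketch of pin detection and vulnerability matching; the package, versions, and vulnerable range below are invented purely to show the two code paths (pinned containment check vs. range intersection).

# Illustrative versions and ranges only.
pinned = PythonSpecification("django==3.2.0")
loose = PythonSpecification("django>=3.0,<4.0")

print(pinned.is_pinned())  # True
print(loose.is_pinned())   # False

vulnerable_range = SpecifierSet("<3.2.5")
print(pinned.is_vulnerable(vulnerable_range, insecure_versions=["3.2.0", "3.2.1"]))  # True
print(loose.is_vulnerable(vulnerable_range, insecure_versions=["3.2.0", "3.2.1"]))   # True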
@@ -0,0 +1,53 @@
from typing import Dict, Optional

from pydantic.dataclasses import dataclass
from typing_extensions import Self

from ..report.schemas.v3_0 import main as v3_0
from .base import SafetyBaseModel


@dataclass
class TelemetryModel(SafetyBaseModel):
    """
    Telemetry object generated per Safety report; this model holds data related
    to the client application running Safety CLI.
    """

    safety_options: Dict[str, Dict[str, int]]
    safety_version: str
    safety_source: str
    os_type: Optional[str] = None
    os_release: Optional[str] = None
    os_description: Optional[str] = None
    python_version: Optional[str] = None
    safety_command: Optional[str] = None

    def as_v30(self) -> v3_0.Telemetry:
        return v3_0.Telemetry(
            os_type=self.os_type,
            os_release=self.os_release,
            os_description=self.os_description,
            python_version=self.python_version,
            safety_command=self.safety_command,
            safety_options=self.safety_options,
            safety_version=self.safety_version,
            safety_source=self.safety_source,
        )

    @classmethod
    def from_v30(cls, obj: v3_0.SchemaModelV30) -> 'TelemetryModel':
        if not isinstance(obj, v3_0.Telemetry):
            raise TypeError('Expected instance of v3_0.Telemetry')

        return TelemetryModel(
            os_type=obj.os_type,
            os_release=obj.os_release,
            os_description=obj.os_description,
            python_version=obj.python_version,
            safety_command=obj.safety_command,
            safety_options=obj.safety_options,
            safety_version=obj.safety_version,
            safety_source=obj.safety_source,
        )
@@ -0,0 +1,8 @@
from pydantic.version import VERSION as PYDANTIC_VERSION


def dict_dump(obj):
    if PYDANTIC_VERSION.startswith("1."):
        return obj.dict()

    return obj.model_dump()
@@ -0,0 +1,70 @@
from dataclasses import field
from typing import Any, List, Optional

from pydantic.dataclasses import dataclass


@dataclass
class ClosestSecureVersion:
    upper: Optional[str]
    lower: Optional[str]


@dataclass
class RemediationModel:
    vulnerabilities_found: int
    more_info_url: str
    recommended: Optional[str]
    closest_secure: Optional[ClosestSecureVersion] = None
    other_recommended: List[str] = field(default_factory=lambda: [])


@dataclass
class Vulnerability:
    vulnerability_id: str
    package_name: str
    ignored: bool
    vulnerable_spec: Any
    ignored_reason: Optional[str] = None
    ignored_expires: Optional[str] = None
    ignored_code: Optional[str] = None
    all_vulnerable_specs: Optional[List[str]] = None
    analyzed_version: Optional[str] = None
    analyzed_requirement: Optional[str] = None
    advisory: Optional[str] = None
    is_transitive: Optional[bool] = None
    published_date: Optional[str] = None
    fixed_versions: Optional[List[str]] = None
    closest_versions_without_known_vulnerabilities: Optional[List[str]] = None
    resources: Optional[List[str]] = None
    CVE: Optional[Any] = None
    severity: Optional[Any] = None
    affected_versions: Optional[List[str]] = None
    more_info_url: Optional[str] = None

    def get_advisory(self):
        return (
            self.advisory.replace("\r", "")
            if self.advisory
            else "No advisory found for this vulnerability."
        )

    def to_model_dict(self):
        try:
            affected_spec = next(iter(self.vulnerable_spec))
        except Exception:
            affected_spec = ""

        repr = {
            "id": self.vulnerability_id,
            "vulnerable_spec": affected_spec,
        }

        if self.ignored:
            repr["ignored"] = {
                "code": self.ignored_code,
                "reason": self.ignored_reason,
                "expires": self.ignored_expires,
            }

        return repr
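A sketch of how an ignored finding serializes through to_model_dict; every identifier and value below is made up for illustration.

# Made-up identifiers only.
vuln = Vulnerability(
    vulnerability_id="CVE-2024-0001",
    package_name="example-pkg",
    ignored=True,
    vulnerable_spec=["<1.2.3"],
    ignored_reason="accepted risk",
    ignored_code="manual",
)
print(vuln.to_model_dict())
# {'id': 'CVE-2024-0001', 'vulnerable_spec': '<1.2.3',
#  'ignored': {'code': 'manual', 'reason': 'accepted risk', 'expires': None}}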