Iliyan Angelov
2025-12-01 06:50:10 +02:00
parent 91f51bc6fe
commit 62c1fe5951
4682 changed files with 544807 additions and 31208 deletions


@@ -0,0 +1,110 @@
import logging
import sys
import json
from typing import Any, IO
import click
from dataclasses import dataclass
from safety.constants import CONTEXT_COMMAND_TYPE
from . import github
from safety.util import SafetyPolicyFile
from safety.scan.constants import CLI_ALERT_COMMAND_HELP
LOG = logging.getLogger(__name__)
def get_safety_cli_legacy_group():
from safety.cli_util import SafetyCLILegacyGroup
return SafetyCLILegacyGroup
def get_context_settings():
from safety.cli_util import CommandType
return {CONTEXT_COMMAND_TYPE: CommandType.UTILITY}
@dataclass
class Alert:
"""
Data class for storing alert details.
Attributes:
report (Any): The report data.
key (str): The API key for the safetycli.com vulnerability database.
policy (Any): The policy data.
requirements_files (Any): The requirements files data.
"""
report: Any
key: str
policy: Any = None
requirements_files: Any = None
@click.group(
cls=get_safety_cli_legacy_group(),
help=CLI_ALERT_COMMAND_HELP,
deprecated=True,
context_settings=get_context_settings(),
)
@click.option(
"--check-report",
help="JSON output of Safety Check to work with.",
type=click.File("r"),
default=sys.stdin,
required=True,
)
@click.option(
"--key",
envvar="SAFETY_API_KEY",
help="API Key for safetycli.com's vulnerability database. Can be set as SAFETY_API_KEY "
"environment variable.",
required=True,
)
@click.option(
"--policy-file",
type=SafetyPolicyFile(),
default=".safety-policy.yml",
help="Define the policy file to be used",
)
@click.pass_context
def alert(
ctx: click.Context, check_report: IO[str], policy_file: SafetyPolicyFile, key: str
) -> None:
"""
Command for processing the Safety Check JSON report.
Args:
ctx (click.Context): The Click context object.
check_report (IO[str]): The file containing the JSON report.
policy_file (SafetyPolicyFile): The policy file to be used.
key (str): The API key for the safetycli.com vulnerability database.
"""
LOG.info("alert started")
LOG.info(f"check_report is using stdin: {check_report == sys.stdin}")
with check_report:
# TODO: This breaks --help for subcommands
try:
safety_report = json.load(check_report)
except json.decoder.JSONDecodeError as e:
LOG.info("Error in the JSON report.")
click.secho("Error decoding input JSON: {}".format(e.msg), fg="red")
sys.exit(1)
if "report_meta" not in safety_report:
click.secho("You must pass in a valid Safety Check JSON report", fg="red")
sys.exit(1)
ctx.obj = Alert(
report=safety_report, policy=policy_file if policy_file else {}, key=key
)
# Adding subcommands for GitHub integration
alert.add_command(github.github_pr)
alert.add_command(github.github_issue)
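Taken together, the group reads a `safety check` JSON report (from `--check-report` or stdin) and hands it to the GitHub subcommands registered above. A minimal invocation sketch, assuming Click's default command-name derivation (`github_pr` becomes `github-pr`) and a report produced by the legacy `safety check --json` output:

```
safety check --json > report.json
safety alert --check-report report.json --key "$SAFETY_API_KEY" \
    github-pr --repo my-org/my-project --token "$GITHUB_TOKEN"
```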


@@ -0,0 +1,575 @@
# type: ignore
import itertools
import logging
import re
import sys
from typing import Any, Optional
import click
try:
import github as pygithub
except ImportError:
pygithub = None
from packaging.specifiers import SpecifierSet
from packaging.utils import canonicalize_name
from . import utils, requirements
LOG = logging.getLogger(__name__)
def create_branch(repo: Any, base_branch: str, new_branch: str) -> None:
"""
Create a new branch in the given GitHub repository.
Args:
repo (Any): The GitHub repository object.
base_branch (str): The name of the base branch.
new_branch (str): The name of the new branch to create.
"""
ref = repo.get_git_ref("heads/" + base_branch)
repo.create_git_ref(ref="refs/heads/" + new_branch, sha=ref.object.sha)
def delete_branch(repo: Any, branch: str) -> None:
"""
Delete a branch from the given GitHub repository.
Args:
repo (Any): The GitHub repository object.
branch (str): The name of the branch to delete.
"""
ref = repo.get_git_ref(f"heads/{branch}")
ref.delete()
@click.command()
@click.option("--repo", help="GitHub standard repo path (eg, my-org/my-project)")
@click.option("--token", help="GitHub Access Token")
@click.option(
"--base-url",
help="Optional custom Base URL, if you're using GitHub enterprise",
default=None,
)
@click.pass_obj
@utils.require_files_report
def github_pr(obj: Any, repo: str, token: str, base_url: Optional[str]) -> None:
"""
Create a GitHub PR to fix any vulnerabilities using Safety's remediation data.
This is usually run by a GitHub action. If you're running this manually, ensure that your local repo is up to date and on HEAD - otherwise you'll see strange results.
Args:
obj (Any): The Click context object containing report data.
repo (str): The GitHub repository path.
token (str): The GitHub Access Token.
base_url (Optional[str]): Custom base URL for GitHub Enterprise, if applicable.
"""
if pygithub is None:
click.secho(
"pygithub is not installed. Did you install Safety with GitHub support? Try pip install safety[github]",
fg="red",
)
sys.exit(1)
# Load alert configurations from the policy
alert = obj.policy.get("alert", {}) or {}
security = alert.get("security", {}) or {}
config_pr = security.get("github-pr", {}) or {}
branch_prefix = config_pr.get("branch-prefix", "pyup/")
pr_prefix = config_pr.get("pr-prefix", "[PyUp] ")
assignees = config_pr.get("assignees", [])
labels = config_pr.get("labels", ["security"])
label_severity = config_pr.get("label-severity", True)
ignore_cvss_severity_below = config_pr.get("ignore-cvss-severity-below", 0)
ignore_cvss_unknown_severity = config_pr.get("ignore-cvss-unknown-severity", False)
# Authenticate with GitHub
gh = pygithub.Github(token, **({"base_url": base_url} if base_url else {}))
repo_name = repo
repo = gh.get_repo(repo)
try:
self_user = gh.get_user().login
except pygithub.GithubException:
# If we're using a token from an action (or integration) we can't call `get_user()`. Fall back
# to assuming we're running under an action
self_user = "web-flow"
# Collect all remediations from the report
req_remediations = list(
itertools.chain.from_iterable(
rem.get("requirements", {}).values()
for pkg_name, rem in obj.report["remediations"].items()
)
)
# Get all open pull requests for the repository
pulls = repo.get_pulls(state="open", sort="created", base=repo.default_branch)
pending_updates = set(
[
f"{canonicalize_name(req_rem['requirement']['name'])}{req_rem['requirement']['specifier']}"
for req_rem in req_remediations
]
)
created = 0
# TODO: Refactor this loop into a fn to iterate over remediations nicely
# Iterate over all requirements files and process each remediation
for name, contents in obj.requirements_files.items():
raw_contents = contents
contents = contents.decode("utf-8") # TODO - encoding?
parsed_req_file = requirements.RequirementFile(name, contents)
for remediation in req_remediations:
pkg = remediation["requirement"]["name"]
pkg_canonical_name: str = canonicalize_name(pkg)
analyzed_spec: str = remediation["requirement"]["specifier"]
# Skip remediations without a recommended version
if remediation["recommended_version"] is None:
LOG.debug(
f"The GitHub PR alerter only currently supports remediations that have a recommended_version: {pkg}"
)
continue
# We have a single remediation that can have multiple vulnerabilities
# Find all vulnerabilities associated with the remediation
vulns = [
x
for x in obj.report["vulnerabilities"]
if x["package_name"] == pkg_canonical_name
and x["analyzed_requirement"]["specifier"] == analyzed_spec
]
# Skip if all vulnerabilities have unknown severity and the ignore flag is set
if ignore_cvss_unknown_severity and all(
x["severity"] is None for x in vulns
):
LOG.debug(
"All vulnerabilities have unknown severity, and ignore_cvss_unknown_severity is set."
)
continue
highest_base_score = 0
for vuln in vulns:
if vuln["severity"] is not None:
highest_base_score = max(
highest_base_score,
(vuln["severity"].get("cvssv3", {}) or {}).get(
"base_score", 10
),
)
# Skip if none of the vulnerabilities meet the severity threshold
if ignore_cvss_severity_below:
at_least_one_match = False
for vuln in vulns:
# Consider a None severity as a match, since it's controlled by a different flag
# If we can't find a base_score but we have severity data, assume it's critical for now.
if (
vuln["severity"] is None
or (vuln["severity"].get("cvssv3", {}) or {}).get(
"base_score", 10
)
>= ignore_cvss_severity_below
):
                        at_least_one_match = True
                        break
if not at_least_one_match:
LOG.debug(
f"None of the vulnerabilities found have a score greater than or equal to the ignore_cvss_severity_below of {ignore_cvss_severity_below}"
)
continue
for parsed_req in parsed_req_file.requirements:
specs = (
SpecifierSet(">=0")
if parsed_req.specs == SpecifierSet("")
else parsed_req.specs
)
# Check if the requirement matches the remediation
if (
canonicalize_name(parsed_req.name) == pkg_canonical_name
and str(specs) == analyzed_spec
):
updated_contents = parsed_req.update_version(
contents, remediation["recommended_version"]
)
pending_updates.discard(f"{pkg_canonical_name}{analyzed_spec}")
new_branch = branch_prefix + utils.generate_branch_name(
pkg, remediation
)
skip_create = False
# Few possible cases:
# 1. No existing PRs exist for this change (don't need to handle)
# 2. An existing PR exists, and it's out of date (eg, recommended 0.5.1 and we want 0.5.2)
                    # 3. An existing PR exists, and it's not mergeable anymore (eg, needs a rebase)
# 4. An existing PR exists, and everything's up to date.
# 5. An existing PR exists, but it's not needed anymore (perhaps we've been updated to a later version)
# 6. No existing PRs exist, but a branch does exist (perhaps the PR was closed but a stale branch left behind)
# In any case, we only act if we've been the only committer to the branch.
# Handle various cases for existing pull requests
for pr in pulls:
if not pr.head.ref.startswith(branch_prefix):
continue
authors = [
commit.committer.login for commit in pr.get_commits()
]
only_us = all([x == self_user for x in authors])
try:
_, pr_pkg, pr_spec, pr_ver = pr.head.ref.split("/")
except ValueError:
                            # It's possible that something weird has manually been done; skip invalid branch names
                            LOG.debug(
                                "Found an invalid branch name on an open PR that matches our prefix. Skipping."
)
continue
pr_pkg = canonicalize_name(pr_pkg)
if pr_pkg != pkg_canonical_name:
continue
# Case 4: An up-to-date PR exists
if (
pr_pkg == pkg_canonical_name
and pr_spec == analyzed_spec
and pr_ver == remediation["recommended_version"]
and pr.mergeable
):
LOG.debug(
f"An up to date PR #{pr.number} for {pkg} was found, no action will be taken."
)
skip_create = True
continue
if not only_us:
LOG.debug(
f"There are other committers on the PR #{pr.number} for {pkg}. No further action will be taken."
)
continue
# Case 2: An existing PR is out of date
if (
pr_pkg == pkg_canonical_name
and pr_spec == analyzed_spec
and pr_ver != remediation["recommended_version"]
):
LOG.debug(
f"Closing stale PR #{pr.number} for {pkg} as a newer recommended version became"
)
pr.create_issue_comment(
"This PR has been replaced, since a newer recommended version became available."
)
pr.edit(state="closed")
delete_branch(repo, pr.head.ref)
# Case 3: An existing PR is not mergeable
if not pr.mergeable:
LOG.debug(
f"Closing PR #{pr.number} for {pkg} as it has become unmergable and we were the only committer"
)
pr.create_issue_comment(
"This PR has been replaced since it became unmergable."
)
pr.edit(state="closed")
delete_branch(repo, pr.head.ref)
# Skip if no changes were made
if updated_contents == contents:
LOG.debug(
f"Couldn't update {pkg} to {remediation['recommended_version']}"
)
continue
# Skip creation if indicated
if skip_create:
continue
# Create a new branch and commit the changes
try:
create_branch(repo, repo.default_branch, new_branch)
except pygithub.GithubException as e:
if e.data["message"] == "Reference already exists":
# There might be a stale branch. If the bot is the only committer, nuke it.
comparison = repo.compare(repo.default_branch, new_branch)
authors = [
commit.committer.login for commit in comparison.commits
]
only_us = all([x == self_user for x in authors])
if only_us:
delete_branch(repo, new_branch)
create_branch(repo, repo.default_branch, new_branch)
else:
LOG.debug(
f"The branch '{new_branch}' already exists - but there is no matching PR and this branch has committers other than us. This remediation will be skipped."
)
continue
else:
raise e
try:
repo.update_file(
path=name,
message=utils.generate_commit_message(pkg, remediation),
content=updated_contents,
branch=new_branch,
sha=utils.git_sha1(raw_contents),
)
except pygithub.GithubException as e:
if "does not match" in e.data["message"]:
click.secho(
f"GitHub blocked a commit on our branch to the requirements file, {name}, as the local hash we computed didn't match the version on {repo.default_branch}. Make sure you're running safety against the latest code on your default branch.",
fg="red",
)
continue
else:
raise e
pr = repo.create_pull(
title=pr_prefix + utils.generate_title(pkg, remediation, vulns),
body=utils.generate_body(
pkg, remediation, vulns, api_key=obj.key
),
head=new_branch,
base=repo.default_branch,
)
LOG.debug(f"Created Pull Request to update {pkg}")
created += 1
# Add assignees and labels to the PR
for assignee in assignees:
pr.add_to_assignees(assignee)
for label in labels:
pr.add_to_labels(label)
if label_severity:
score_as_label = utils.cvss3_score_to_label(highest_base_score)
if score_as_label:
pr.add_to_labels(score_as_label)
if len(pending_updates) > 0:
click.secho(
"The following remediations were not followed: {}".format(
", ".join(pending_updates)
),
fg="red",
)
if created:
click.secho(
f"Safety successfully created {created} GitHub PR{'s' if created > 1 else ''} for repo {repo_name}"
)
else:
click.secho(
"No PRs created; please run the command with debug mode for more information."
)
@click.command()
@click.option("--repo", help="GitHub standard repo path (eg, my-org/my-project)")
@click.option("--token", help="GitHub Access Token")
@click.option(
"--base-url",
help="Optional custom Base URL, if you're using GitHub enterprise",
default=None,
)
@click.pass_obj
@utils.require_files_report # TODO: For now, it can be removed in the future to support env scans.
def github_issue(obj: Any, repo: str, token: str, base_url: Optional[str]) -> None:
"""
Create a GitHub Issue for any vulnerabilities found using PyUp's remediation data.
Normally, this is run by a GitHub action. If you're running this manually, ensure that your local repo is up to date and on HEAD - otherwise you'll see strange results.
Args:
obj (Any): The Click context object containing report data.
repo (str): The GitHub repository path.
token (str): The GitHub Access Token.
base_url (Optional[str]): Custom base URL for GitHub Enterprise, if applicable.
"""
LOG.info("github_issue")
if pygithub is None:
click.secho(
"pygithub is not installed. Did you install Safety with GitHub support? Try pip install safety[github]",
fg="red",
)
sys.exit(1)
# Load alert configurations from the policy
alert = obj.policy.get("alert", {}) or {}
security = alert.get("security", {}) or {}
config_issue = security.get("github-issue", {}) or {}
issue_prefix = config_issue.get("issue-prefix", "[PyUp] ")
assignees = config_issue.get("assignees", [])
labels = config_issue.get("labels", ["security"])
label_severity = config_issue.get("label-severity", True)
ignore_cvss_severity_below = config_issue.get("ignore-cvss-severity-below", 0)
ignore_cvss_unknown_severity = config_issue.get(
"ignore-cvss-unknown-severity", False
)
# Authenticate with GitHub
gh = pygithub.Github(token, **({"base_url": base_url} if base_url else {}))
repo_name = repo
repo = gh.get_repo(repo)
# Get all open issues for the repository
issues = list(repo.get_issues(state="open", sort="created"))
ISSUE_TITLE_REGEX = re.escape(issue_prefix) + r"Security Vulnerability in (.+)"
req_remediations = list(
itertools.chain.from_iterable(
rem.get("requirements", {}).values()
for pkg_name, rem in obj.report["remediations"].items()
)
)
created = 0
# Iterate over all requirements files and process each remediation
for name, contents in obj.requirements_files.items():
contents = contents.decode("utf-8") # TODO - encoding?
parsed_req_file = requirements.RequirementFile(name, contents)
for remediation in req_remediations:
pkg: str = remediation["requirement"]["name"]
pkg_canonical_name: str = canonicalize_name(pkg)
analyzed_spec: str = remediation["requirement"]["specifier"]
# Skip remediations without a recommended version
if remediation["recommended_version"] is None:
LOG.debug(
f"The GitHub Issue alerter only currently supports remediations that have a recommended_version: {pkg}"
)
continue
# We have a single remediation that can have multiple vulnerabilities
# Find all vulnerabilities associated with the remediation
vulns = [
x
for x in obj.report["vulnerabilities"]
if x["package_name"] == pkg_canonical_name
and x["analyzed_requirement"]["specifier"] == analyzed_spec
]
# Skip if all vulnerabilities have unknown severity and the ignore flag is set
if ignore_cvss_unknown_severity and all(
x["severity"] is None for x in vulns
):
LOG.debug(
"All vulnerabilities have unknown severity, and ignore_cvss_unknown_severity is set."
)
continue
highest_base_score = 0
for vuln in vulns:
if vuln["severity"] is not None:
highest_base_score = max(
highest_base_score,
(vuln["severity"].get("cvssv3", {}) or {}).get(
"base_score", 10
),
)
# Skip if none of the vulnerabilities meet the severity threshold
if ignore_cvss_severity_below:
at_least_one_match = False
for vuln in vulns:
# Consider a None severity as a match, since it's controlled by a different flag
# If we can't find a base_score but we have severity data, assume it's critical for now.
if (
vuln["severity"] is None
or (vuln["severity"].get("cvssv3", {}) or {}).get(
"base_score", 10
)
>= ignore_cvss_severity_below
):
at_least_one_match = True
break
if not at_least_one_match:
LOG.debug(
f"None of the vulnerabilities found have a score greater than or equal to the ignore_cvss_severity_below of {ignore_cvss_severity_below}"
)
continue
for parsed_req in parsed_req_file.requirements:
specs = (
SpecifierSet(">=0")
if parsed_req.specs == SpecifierSet("")
else parsed_req.specs
)
if (
canonicalize_name(parsed_req.name) == pkg_canonical_name
and str(specs) == analyzed_spec
):
skip = False
for issue in issues:
match = re.match(ISSUE_TITLE_REGEX, issue.title)
if match:
group = match.group(1)
if (
group == f"{pkg}{analyzed_spec}"
or group == f"{pkg_canonical_name}{analyzed_spec}"
):
skip = True
break
                    # For now, we just skip remediations that already have an open issue - we don't try to update them.
if skip:
LOG.debug(
f"An issue already exists for {pkg}{analyzed_spec} - skipping"
)
continue
                    # Create a new GitHub issue
                    new_issue = repo.create_issue(
                        title=issue_prefix
                        + utils.generate_issue_title(pkg, remediation),
                        body=utils.generate_issue_body(
                            pkg, remediation, vulns, api_key=obj.key
                        ),
                    )
                    created += 1
                    LOG.debug(f"Created issue to update {pkg}")
                    # Add assignees and labels to the issue
                    for assignee in assignees:
                        new_issue.add_to_assignees(assignee)
                    for label in labels:
                        new_issue.add_to_labels(label)
                    if label_severity:
                        score_as_label = utils.cvss3_score_to_label(highest_base_score)
                        if score_as_label:
                            new_issue.add_to_labels(score_as_label)
if created:
click.secho(
f"Safety successfully created {created} new GitHub Issue{'s' if created > 1 else ''} for repo {repo_name}"
)
else:
click.secho(
"No issues created; please run the command with debug mode for more information."
)
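Both subcommands read their settings from the `alert.security` section of the policy object loaded by the `alert` group. A `.safety-policy.yml` sketch assembled from the defaults in the code above (every key is optional; the values shown are the fallbacks):

```
alert:
  security:
    github-pr:
      branch-prefix: "pyup/"
      pr-prefix: "[PyUp] "
      assignees: []
      labels: ["security"]
      label-severity: true
      ignore-cvss-severity-below: 0
      ignore-cvss-unknown-severity: false
    github-issue:
      issue-prefix: "[PyUp] "
      assignees: []
      labels: ["security"]
      label-severity: true
      ignore-cvss-severity-below: 0
      ignore-cvss-unknown-severity: false
```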


@@ -0,0 +1,564 @@
# type: ignore
from __future__ import unicode_literals
from packaging.version import parse as parse_version
from packaging.specifiers import SpecifierSet
import requests
from typing import Any, Optional, Generator, Tuple, List
from safety.meta import get_meta_http_headers
from datetime import datetime
from dparse import parse, parser, updater, filetypes
from dparse.dependencies import Dependency
from dparse.parser import setuptools_parse_requirements_backport as parse_requirements
class RequirementFile(object):
"""
Class representing a requirements file with its content and metadata.
Attributes:
path (str): The file path.
content (str): The content of the file.
sha (Optional[str]): The SHA of the file.
"""
def __init__(self, path: str, content: str, sha: Optional[str] = None):
self.path = path
self.content = content
self.sha = sha
self._requirements: Optional[List] = None
self._other_files: Optional[List] = None
self._is_valid = None
self.is_pipfile = False
self.is_pipfile_lock = False
self.is_setup_cfg = False
def __str__(self) -> str:
return (
"RequirementFile(path='{path}', sha='{sha}', content='{content}')".format(
path=self.path,
content=self.content[:30] + "[truncated]"
if len(self.content) > 30
else self.content,
sha=self.sha,
)
)
@property
def is_valid(self) -> Optional[bool]:
"""
Checks if the requirements file is valid by parsing it.
Returns:
bool: True if the file is valid, False otherwise.
"""
if self._is_valid is None:
self._parse()
return self._is_valid
@property
def requirements(self) -> Optional[List]:
"""
Returns the list of requirements parsed from the file.
Returns:
List: The list of requirements.
"""
if not self._requirements:
self._parse()
return self._requirements
@property
def other_files(self) -> Optional[List]:
"""
Returns the list of other files resolved from the requirements file.
Returns:
List: The list of other files.
"""
if not self._other_files:
self._parse()
return self._other_files
@staticmethod
def parse_index_server(line: str) -> Optional[str]:
"""
Parses the index server from a given line.
Args:
line (str): The line to parse.
Returns:
str: The parsed index server.
"""
return parser.Parser.parse_index_server(line)
def _hash_parser(self, line: str) -> Optional[Tuple[str, List[str]]]:
"""
Parses the hashes from a given line.
Args:
line (str): The line to parse.
Returns:
List: The list of parsed hashes.
"""
return parser.Parser.parse_hashes(line)
def _parse_requirements_txt(self) -> None:
"""
Parses the requirements.txt file format.
"""
self.parse_dependencies(filetypes.requirements_txt)
def _parse_conda_yml(self) -> None:
"""
Parses the conda.yml file format.
"""
self.parse_dependencies(filetypes.conda_yml)
def _parse_tox_ini(self) -> None:
"""
Parses the tox.ini file format.
"""
self.parse_dependencies(filetypes.tox_ini)
def _parse_pipfile(self) -> None:
"""
Parses the Pipfile format.
"""
self.parse_dependencies(filetypes.pipfile)
self.is_pipfile = True
def _parse_pipfile_lock(self) -> None:
"""
Parses the Pipfile.lock format.
"""
self.parse_dependencies(filetypes.pipfile_lock)
self.is_pipfile_lock = True
def _parse_setup_cfg(self) -> None:
"""
Parses the setup.cfg format.
"""
self.parse_dependencies(filetypes.setup_cfg)
self.is_setup_cfg = True
def _parse(self) -> None:
"""
Parses the requirements file to extract dependencies and other files.
"""
self._requirements, self._other_files = [], []
if self.path.endswith(".yml") or self.path.endswith(".yaml"):
self._parse_conda_yml()
elif self.path.endswith(".ini"):
self._parse_tox_ini()
elif self.path.endswith("Pipfile"):
self._parse_pipfile()
elif self.path.endswith("Pipfile.lock"):
self._parse_pipfile_lock()
elif self.path.endswith("setup.cfg"):
self._parse_setup_cfg()
else:
self._parse_requirements_txt()
self._is_valid = len(self._requirements) > 0 or len(self._other_files) > 0
def parse_dependencies(self, file_type: str) -> None:
"""
Parses the dependencies from the content based on the file type.
Args:
file_type (str): The type of the file.
"""
result = parse(
self.content,
path=self.path,
sha=self.sha,
file_type=file_type,
marker=(
("pyup: ignore file", "pyup:ignore file"), # file marker
("pyup: ignore", "pyup:ignore"), # line marker
),
)
for dep in result.dependencies:
req = Requirement(
name=dep.name,
specs=dep.specs,
line=dep.line,
lineno=dep.line_numbers[0] if dep.line_numbers else 0,
extras=dep.extras,
file_type=file_type,
)
req.index_server = dep.index_server
if self.is_pipfile:
req.pipfile = self.path
req.hashes = dep.hashes
self._requirements.append(req)
self._other_files = result.resolved_files
def iter_lines(self, lineno: int = 0) -> Generator[str, None, None]:
"""
Iterates over lines in the content starting from a specific line number.
Args:
lineno (int): The line number to start from.
Yields:
str: The next line in the content.
"""
for line in self.content.splitlines()[lineno:]:
yield line
@classmethod
def resolve_file(cls, file_path: str, line: str) -> str:
"""
Resolves a file path from a given line.
Args:
file_path (str): The file path to resolve.
line (str): The line containing the file path.
Returns:
str: The resolved file path.
"""
return parser.Parser.resolve_file(file_path, line)
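# Usage sketch (names and versions illustrative): parsing dispatches on the
# file name, so a requirements.txt payload yields Requirement objects directly:
#   rf = RequirementFile("requirements.txt", "django==2.2\nrequests>=2.0\n")
#   [(r.name, str(r.specs)) for r in rf.requirements]
#   -> [("django", "==2.2"), ("requests", ">=2.0")]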
class Requirement(object):
"""
Class representing a single requirement.
Attributes:
name (str): The name of the requirement.
specs (SpecifierSet): The version specifiers for the requirement.
line (str): The line containing the requirement.
lineno (int): The line number of the requirement.
extras (List): The extras for the requirement.
file_type (str): The type of the file containing the requirement.
"""
def __init__(
self,
name: str,
specs: SpecifierSet,
line: str,
lineno: int,
extras: List,
file_type: str,
):
self.name = name
self.key = name.lower()
self.specs = specs
self.line = line
self.lineno = lineno
self.index_server = None
self.extras = extras
self.hashes = []
self.file_type = file_type
self.pipfile: Optional[str] = None
self.hashCmp = (
self.key,
self.specs,
frozenset(self.extras),
)
self._is_insecure = None
self._changelog = None
# Convert compatible releases to a range of versions
if (
len(self.specs._specs) == 1
and next(iter(self.specs._specs))._spec[0] == "~="
):
# convert compatible releases to something more easily consumed,
# e.g. '~=1.2.3' is equivalent to '>=1.2.3,<1.3.0', while '~=1.2'
# is equivalent to '>=1.2,<2.0'
min_version = next(iter(self.specs._specs))._spec[1]
max_version = list(parse_version(min_version).release)
max_version[-1] = 0
max_version[-2] = max_version[-2] + 1
max_version = ".".join(str(x) for x in max_version)
self.specs = SpecifierSet(">=%s,<%s" % (min_version, max_version))
def __eq__(self, other: Any) -> bool:
return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
def __ne__(self, other: Any) -> bool:
return not self == other
def __str__(self) -> str:
return "Requirement.parse({line}, {lineno})".format(
line=self.line, lineno=self.lineno
)
def __repr__(self) -> str:
return self.__str__()
@property
def is_pinned(self) -> bool:
"""
Checks if the requirement is pinned to a specific version.
Returns:
bool: True if pinned, False otherwise.
"""
if (
len(self.specs._specs) == 1
and next(iter(self.specs._specs))._spec[0] == "=="
):
return True
return False
@property
def is_open_ranged(self) -> bool:
"""
Checks if the requirement has an open range of versions.
Returns:
bool: True if open ranged, False otherwise.
"""
if (
len(self.specs._specs) == 1
and next(iter(self.specs._specs))._spec[0] == ">="
):
return True
return False
@property
def is_ranged(self) -> bool:
"""
Checks if the requirement has a range of versions.
Returns:
bool: True if ranged, False otherwise.
"""
return len(self.specs._specs) >= 1 and not self.is_pinned
@property
def is_loose(self) -> bool:
"""
Checks if the requirement has no version specifiers.
Returns:
bool: True if loose, False otherwise.
"""
return len(self.specs._specs) == 0
@staticmethod
def convert_semver(version: str) -> dict:
"""
Converts a version string to a semantic version dictionary.
Args:
version (str): The version string.
Returns:
dict: The semantic version dictionary.
"""
semver = {"major": 0, "minor": 0, "patch": 0}
version_parts = version.split(".")
        # don't be overly clever here. repetition makes it more readable and works exactly how
        # it is supposed to
try:
semver["major"] = int(version_parts[0])
semver["minor"] = int(version_parts[1])
semver["patch"] = int(version_parts[2])
except (IndexError, ValueError):
pass
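        # e.g. convert_semver("1.2") -> {"major": 1, "minor": 2, "patch": 0};
        # a non-numeric part (say "2b1") aborts at that field, leaving the
        # remaining fields at 0.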
return semver
@property
def can_update_semver(self) -> bool:
"""
Checks if the requirement can be updated based on semantic versioning rules.
Returns:
bool: True if it can be updated, False otherwise.
"""
# return early if there's no update filter set
if "pyup: update" not in self.line:
return True
        update = self.line.split("pyup: update")[1].strip().split("#")[0].strip()
current_version = Requirement.convert_semver(
next(iter(self.specs._specs))._spec[1]
)
next_version = Requirement.convert_semver(self.latest_version) # type: ignore
if update == "major":
if current_version["major"] < next_version["major"]:
return True
elif update == "minor":
if (
current_version["major"] < next_version["major"]
or current_version["minor"] < next_version["minor"]
):
return True
return False
@property
def filter(self):
"""
Returns the filter for the requirement if specified.
Returns:
            Union[SpecifierSet, bool]: The filter specifier set, or False if no active filter is set.
"""
rqfilter = False
if "rq.filter:" in self.line:
rqfilter = self.line.split("rq.filter:")[1].strip().split("#")[0]
elif "pyup:" in self.line:
if "pyup: update" not in self.line:
rqfilter = self.line.split("pyup:")[1].strip().split("#")[0]
# unset the filter once the date set in 'until' is reached
if "until" in rqfilter:
rqfilter, until = [part.strip() for part in rqfilter.split("until")]
try:
until = datetime.strptime(until, "%Y-%m-%d")
if until < datetime.now():
rqfilter = False
except ValueError:
# wrong date formatting
pass
if rqfilter:
try:
(rqfilter,) = parse_requirements("filter " + rqfilter)
if len(rqfilter.specifier._specs) > 0:
return rqfilter.specifier
except ValueError:
pass
return False
@property
def version(self) -> Optional[str]:
"""
Returns the current version of the requirement.
Returns:
Optional[str]: The current version, or None if not pinned.
"""
if self.is_pinned:
return next(iter(self.specs._specs))._spec[1]
specs = self.specs
if self.filter:
specs = SpecifierSet(
",".join(
[
"".join(s._spec)
for s in list(specs._specs) + list(self.filter._specs)
]
)
)
return self.get_latest_version_within_specs( # type: ignore
specs,
versions=self.package.versions,
prereleases=self.prereleases, # type: ignore
)
def get_hashes(self, version: str) -> List:
"""
Retrieves the hashes for a specific version from PyPI.
Args:
version (str): The version to retrieve hashes for.
Returns:
List: A list of hashes for the specified version.
"""
headers = get_meta_http_headers()
r = requests.get(
"https://pypi.org/pypi/{name}/{version}/json".format(
name=self.key, version=version
),
headers=headers,
)
hashes = []
data = r.json()
        for item in data.get("urls", []):
sha256 = item.get("digests", {}).get("sha256", False)
if sha256:
hashes.append({"hash": sha256, "method": "sha256"})
return hashes
def update_version(
self, content: str, version: str, update_hashes: bool = True
) -> str:
"""
Updates the version of the requirement in the content.
Args:
content (str): The original content.
version (str): The new version to update to.
update_hashes (bool): Whether to update the hashes as well.
Returns:
str: The updated content.
"""
if self.file_type == filetypes.tox_ini:
updater_class = updater.ToxINIUpdater
elif self.file_type == filetypes.conda_yml:
updater_class = updater.CondaYMLUpdater
elif self.file_type == filetypes.requirements_txt:
updater_class = updater.RequirementsTXTUpdater
elif self.file_type == filetypes.pipfile:
updater_class = updater.PipfileUpdater
elif self.file_type == filetypes.pipfile_lock:
updater_class = updater.PipfileLockUpdater
elif self.file_type == filetypes.setup_cfg:
updater_class = updater.SetupCFGUpdater
else:
raise NotImplementedError
dep = Dependency(
name=self.name,
specs=self.specs,
line=self.line,
line_numbers=[
self.lineno,
]
if self.lineno != 0
else None,
dependency_type=self.file_type,
hashes=self.hashes,
extras=self.extras,
)
hashes = []
if self.hashes and update_hashes:
hashes = self.get_hashes(version)
return updater_class.update(
content=content, dependency=dep, version=version, hashes=hashes, spec="=="
)
@classmethod
def parse(
cls, s: str, lineno: int, file_type: str = filetypes.requirements_txt
) -> "Requirement":
"""
Parses a requirement from a line of text.
Args:
s (str): The line of text.
lineno (int): The line number.
file_type (str): The type of the file containing the requirement.
Returns:
Requirement: The parsed requirement.
"""
# setuptools requires a space before the comment. If this isn't the case, add it.
if "\t#" in s:
(parsed,) = parse_requirements(s.replace("\t#", "\t #"))
else:
(parsed,) = parse_requirements(s)
return cls(
name=parsed.name,
specs=parsed.specifier,
line=s,
lineno=lineno,
extras=list(parsed.extras),
file_type=file_type,
)
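# Usage sketch (line content illustrative):
#   req = Requirement.parse("requests>=2.0,<3.0", lineno=1)
#   req.is_pinned -> False; req.is_ranged -> True; str(req.specs) -> "<3.0,>=2.0"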


@@ -0,0 +1,54 @@
Safety has detected a vulnerable package, [{{ pkg }}]({{ remediation['more_info_url'] }}), that should be updated from **{% if remediation['version'] %}{{ remediation['version'] }}{% else %}{{ remediation['requirement']['specifier'] }}{% endif %}** to **{{ remediation['recommended_version'] }}** to fix {{ vulns | length }} vulnerabilit{{ "y" if vulns|length == 1 else "ies" }}{% if overall_impact %}{{ " rated " + overall_impact if vulns|length == 1 else " with the highest CVSS severity rating being " + overall_impact }}{% endif %}.
To read more about the impact of {{ "this vulnerability" if vulns|length == 1 else "these vulnerabilities" }} see [PyUp's {{ pkg }} page]({{ remediation['more_info_url'] }}).
{{ hint }}
If you're using `pip`, you can run:
```
pip install {{ pkg }}=={{ remediation['recommended_version'] }}
# Followed by a pip freeze
```
<details>
<summary>Vulnerabilities Found</summary>
{% for vuln in vulns %}
* {{ vuln.advisory }}
{% if vuln.severity and vuln.severity.cvssv3 and vuln.severity.cvssv3.base_severity %}
* This vulnerability was rated {{ vuln.severity.cvssv3.base_severity }} ({{ vuln.severity.cvssv3.base_score }}) on CVSSv3.
{% endif %}
* To read more about this vulnerability, see PyUp's [vulnerability page]({{ vuln.more_info_url }})
{% endfor %}
</details>
<details>
<summary>Changelog from {{ remediation['requirement']['name'] }}{{ remediation['requirement']['specifier'] }} to {{ remediation['recommended_version'] }}</summary>
{% if summary_changelog %}
The full changelog is too long to post here. See [PyUp's {{ pkg }} page]({{ remediation['more_info_url'] }}) for more information.
{% else %}
{% for version, log in changelog.items() %}
### {{ version }}
```
{{ log }}
```
{% endfor %}
{% endif %}
</details>
<details>
<summary>Ignoring {{ "This Vulnerability" if vulns|length == 1 else "These Vulnerabilities" }}</summary>
If you wish to [ignore this vulnerability](https://docs.pyup.io/docs/safety-20-policy-file), you can add the following to `.safety-policy.yml` in this repo:
```
security:
ignore-vulnerabilities:{% for vuln in vulns %}
{{ vuln.vulnerability_id }}:
reason: enter a reason as to why you're ignoring this vulnerability
expires: 'YYYY-MM-DD' # datetime string - date this ignore will expire
{% endfor %}
```
</details>


@@ -0,0 +1,47 @@
Vulnerability fix: This PR updates [{{ pkg }}]({{ remediation['more_info_url'] }}) from **{% if remediation['version'] %}{{ remediation['version'] }}{% else %}{{ remediation['requirement']['specifier'] }}{% endif %}** to **{{ remediation['recommended_version'] }}** to fix {{ vulns | length }} vulnerabilit{{ "y" if vulns|length == 1 else "ies" }}{% if overall_impact %}{{ " rated " + overall_impact if vulns|length == 1 else " with the highest CVSS severity rating being " + overall_impact }}{% endif %}.
To read more about the impact of {{ "this vulnerability" if vulns|length == 1 else "these vulnerabilities" }} see [PyUp's {{ pkg }} page]({{ remediation['more_info_url'] }}).
{{ hint }}
<details>
<summary>Vulnerabilities Fixed</summary>
{% for vuln in vulns %}
* {{ vuln.advisory }}
{% if vuln.severity and vuln.severity.cvssv3 and vuln.severity.cvssv3.base_severity %}
* This vulnerability was rated {{ vuln.severity.cvssv3.base_severity }} ({{ vuln.severity.cvssv3.base_score }}) on CVSSv3.
{% endif %}
* To read more about this vulnerability, see PyUp's [vulnerability page]({{ vuln.more_info_url }})
{% endfor %}
</details>
<details>
<summary>Changelog</summary>
{% if summary_changelog %}
The full changelog is too long to post here. See [PyUp's {{ pkg }} page]({{ remediation['more_info_url'] }}) for more information.
{% else %}
{% for version, log in changelog.items() %}
### {{ version }}
```
{{ log }}
```
{% endfor %}
{% endif %}
</details>
<details>
<summary>Ignoring {{ "This Vulnerability" if vulns|length == 1 else "These Vulnerabilities" }}</summary>
If you wish to [ignore this vulnerability](https://docs.pyup.io/docs/safety-20-policy-file), you can add the following to `.safety-policy.yml` in this repo:
```
security:
ignore-vulnerabilities:{% for vuln in vulns %}
{{ vuln.vulnerability_id }}:
reason: enter a reason as to why you're ignoring this vulnerability
expires: 'YYYY-MM-DD' # datetime string - date this ignore will expire
{% endfor %}
```
</details>


@@ -0,0 +1,393 @@
# type: ignore
import hashlib
import os
import sys
from functools import wraps
from typing import Optional, List, Dict, Any
from packaging.version import parse as parse_version
from packaging.specifiers import SpecifierSet
from pathlib import Path
import click
# Jinja2 will only be installed if the optional deps are installed.
# It's fine if our functions fail, but don't let this top level
# import error out.
from safety.models import is_pinned_requirement
from safety.output_utils import (
get_unpinned_hint,
get_specifier_range_info,
get_fix_hint_for_unpinned,
)
try:
import jinja2
except ImportError:
jinja2 = None
import requests
from safety.meta import get_meta_http_headers
def highest_base_score(vulns: List[Dict[str, Any]]) -> float:
"""
Calculates the highest CVSS base score from a list of vulnerabilities.
Args:
vulns (List[Dict[str, Any]]): The list of vulnerabilities.
Returns:
float: The highest CVSS base score.
"""
highest_base_score = 0
for vuln in vulns:
if vuln["severity"] is not None:
highest_base_score = max(
highest_base_score,
(vuln["severity"].get("cvssv3", {}) or {}).get("base_score", 10),
)
return highest_base_score
def generate_branch_name(pkg: str, remediation: Dict[str, Any]) -> str:
"""
Generates a branch name for a given package and remediation.
Args:
pkg (str): The package name.
remediation (Dict[str, Any]): The remediation data.
Returns:
str: The generated branch name.
"""
return f"{pkg}/{remediation['requirement']['specifier']}/{remediation['recommended_version']}"
def generate_issue_title(pkg: str, remediation: Dict[str, Any]) -> str:
"""
Generates an issue title for a given package and remediation.
Args:
pkg (str): The package name.
remediation (Dict[str, Any]): The remediation data.
Returns:
str: The generated issue title.
"""
return f"Security Vulnerability in {pkg}{remediation['requirement']['specifier']}"
def get_hint(remediation: Dict[str, Any]) -> str:
"""
Generates a hint for a given remediation.
Args:
remediation (Dict[str, Any]): The remediation data.
Returns:
str: The generated hint.
"""
pinned = is_pinned_requirement(
SpecifierSet(remediation["requirement"]["specifier"])
)
hint = ""
if not pinned:
fix_hint = get_fix_hint_for_unpinned(remediation)
hint = (
f"{fix_hint}\n\n{get_unpinned_hint(remediation['requirement']['name'])} "
f"{get_specifier_range_info(style=False)}"
)
return hint
def generate_title(
pkg: str, remediation: Dict[str, Any], vulns: List[Dict[str, Any]]
) -> str:
"""
Generates a title for a pull request or issue.
Args:
pkg (str): The package name.
remediation (Dict[str, Any]): The remediation data.
vulns (List[Dict[str, Any]]): The list of vulnerabilities.
Returns:
str: The generated title.
"""
suffix = "y" if len(vulns) == 1 else "ies"
from_dependency = (
remediation["version"]
if remediation["version"]
else remediation["requirement"]["specifier"]
)
return f"Update {pkg} from {from_dependency} to {remediation['recommended_version']} to fix {len(vulns)} vulnerabilit{suffix}"
def generate_body(
pkg: str, remediation: Dict[str, Any], vulns: List[Dict[str, Any]], *, api_key: str
) -> Optional[str]:
"""
Generates the body content for a pull request.
Args:
pkg (str): The package name.
remediation (Dict[str, Any]): The remediation data.
vulns (List[Dict[str, Any]]): The list of vulnerabilities.
api_key (str): The API key for fetching changelog data.
Returns:
str: The generated body content.
"""
changelog = fetch_changelog(
pkg,
remediation["version"],
remediation["recommended_version"],
api_key=api_key,
from_spec=remediation.get("requirement", {}).get("specifier", None),
)
p = Path(__file__).parent / "templates"
env = jinja2.Environment(loader=jinja2.FileSystemLoader(Path(p))) # type: ignore
template = env.get_template("pr.jinja2")
overall_impact = cvss3_score_to_label(highest_base_score(vulns))
context = {
"pkg": pkg,
"remediation": remediation,
"vulns": vulns,
"changelog": changelog,
"overall_impact": overall_impact,
"summary_changelog": False,
"hint": get_hint(remediation),
}
result = template.render(context)
# GitHub has a PR body length limit of 65536. If we're going over that, skip the changelog and just use a link.
if len(result) < 65500:
return result
context["summary_changelog"] = True
return template.render(context)
def generate_issue_body(
pkg: str, remediation: Dict[str, Any], vulns: List[Dict[str, Any]], *, api_key: str
) -> Optional[str]:
"""
Generates the body content for an issue.
Args:
pkg (str): The package name.
remediation (Dict[str, Any]): The remediation data.
vulns (List[Dict[str, Any]]): The list of vulnerabilities.
api_key (str): The API key for fetching changelog data.
Returns:
str: The generated body content.
"""
changelog = fetch_changelog(
pkg,
remediation["version"],
remediation["recommended_version"],
api_key=api_key,
from_spec=remediation.get("requirement", {}).get("specifier", None),
)
p = Path(__file__).parent / "templates"
env = jinja2.Environment(loader=jinja2.FileSystemLoader(Path(p))) # type: ignore
template = env.get_template("issue.jinja2")
overall_impact = cvss3_score_to_label(highest_base_score(vulns))
context = {
"pkg": pkg,
"remediation": remediation,
"vulns": vulns,
"changelog": changelog,
"overall_impact": overall_impact,
"summary_changelog": False,
"hint": get_hint(remediation),
}
result = template.render(context)
    # GitHub has an issue body length limit of 65536. If we're going over that, skip the changelog and just use a link.
if len(result) < 65500:
return result
context["summary_changelog"] = True
return template.render(context)
def generate_commit_message(pkg: str, remediation: Dict[str, Any]) -> str:
"""
Generates a commit message for a given package and remediation.
Args:
pkg (str): The package name.
remediation (Dict[str, Any]): The remediation data.
Returns:
str: The generated commit message.
"""
from_dependency = (
remediation["version"]
if remediation["version"]
else remediation["requirement"]["specifier"]
)
return (
f"Update {pkg} from {from_dependency} to {remediation['recommended_version']}"
)
def git_sha1(raw_contents: bytes) -> str:
"""
Calculates the SHA-1 hash of the given raw contents.
Args:
raw_contents (bytes): The raw contents to hash.
Returns:
str: The SHA-1 hash.
"""
return hashlib.sha1(
b"blob " + str(len(raw_contents)).encode("ascii") + b"\0" + raw_contents
).hexdigest()
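# This mirrors how git computes blob object IDs: sha1(b"blob <size>\0" + data),
# e.g. git_sha1(b"hello\n") -> "ce013625030ba8dba906f756967f9e9ca394464a".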
def fetch_changelog(
package: str,
from_version: Optional[str],
to_version: str,
*,
api_key: str,
from_spec: Optional[str] = None,
) -> Dict[str, Any]:
"""
Fetches the changelog for a package from a specified version to another version.
Args:
package (str): The package name.
from_version (Optional[str]): The starting version.
to_version (str): The ending version.
api_key (str): The API key for fetching changelog data.
from_spec (Optional[str]): The specifier for the starting version.
Returns:
Dict[str, Any]: The fetched changelog data.
"""
to_version_parsed = parse_version(to_version)
if from_version:
from_version_parsed = parse_version(from_version)
else:
from_version_parsed = None
        from_spec = SpecifierSet(from_spec) if from_spec else None
changelog = {}
headers = {"X-Api-Key": api_key}
headers.update(get_meta_http_headers())
r = requests.get(
"https://pyup.io/api/v1/changelogs/{}/".format(package), headers=headers
)
if r.status_code == 200:
data = r.json()
if data:
# sort the changelog by release
sorted_log = sorted(
data.items(), key=lambda v: parse_version(v[0]), reverse=True
)
# go over each release and add it to the log if it's within the "upgrade
# range" e.g. update from 1.2 to 1.3 includes a changelog for 1.2.1 but
# not for 0.4.
for version, log in sorted_log:
parsed_version = parse_version(version)
version_check = from_version and (parsed_version > from_version_parsed)
spec_check = (
from_spec
and isinstance(from_spec, SpecifierSet)
and from_spec.contains(parsed_version)
)
                if (version_check or spec_check) and parsed_version <= to_version_parsed:
changelog[version] = log
return changelog
def cvss3_score_to_label(score: float) -> Optional[str]:
"""
Converts a CVSS v3 score to a severity label.
Args:
score (float): The CVSS v3 score.
Returns:
Optional[str]: The severity label.
"""
if 0.1 <= score <= 3.9:
return "low"
elif 4.0 <= score <= 6.9:
return "medium"
elif 7.0 <= score <= 8.9:
return "high"
elif 9.0 <= score <= 10.0:
return "critical"
return None
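# These bands follow the CVSS v3.x qualitative severity rating scale
# (Low 0.1-3.9, Medium 4.0-6.9, High 7.0-8.9, Critical 9.0-10.0);
# e.g. cvss3_score_to_label(9.8) -> "critical".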
def require_files_report(func):
@wraps(func)
def inner(obj: Any, *args: Any, **kwargs: Any) -> Any:
"""
Decorator that ensures a report is generated against a file.
Args:
obj (Any): The object containing the report.
*args (Any): Additional arguments.
**kwargs (Any): Additional keyword arguments.
Returns:
Any: The result of the decorated function.
"""
if obj.report["report_meta"]["scan_target"] != "files":
click.secho(
"This report was generated against an environment, but this alert command requires "
"a scan report that was generated against a file. To learn more about the "
"`safety alert` command visit https://docs.pyup.io/docs/safety-2-alerts",
fg="red",
)
sys.exit(1)
files = obj.report["report_meta"]["scanned"]
obj.requirements_files = {}
for f in files:
if not os.path.exists(f):
cwd = os.getcwd()
click.secho(
"A requirements file scanned in the report, {}, does not exist (looking in {}).".format(
f, cwd
),
fg="red",
)
sys.exit(1)
            with open(f, "rb") as req_file:
                obj.requirements_files[f] = req_file.read()
return func(obj, *args, **kwargs)
return inner